var/home/core/zuul-output/0000755000175000017500000000000015066745023014535 5ustar corecorevar/home/core/zuul-output/logs/0000755000175000017500000000000015066755577015520 5ustar corecorevar/home/core/zuul-output/logs/kubelet.log0000644000000000000000005041765515066755566017737 0ustar rootrootSep 30 12:20:24 crc systemd[1]: Starting Kubernetes Kubelet... Sep 30 12:20:24 crc restorecon[4699]: Relabeled /var/lib/kubelet/config.json from system_u:object_r:unlabeled_t:s0 to system_u:object_r:container_var_lib_t:s0 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/device-plugins not reset as customized by admin to system_u:object_r:container_file_t:s0 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/device-plugins/kubelet.sock not reset as customized by admin to system_u:object_r:container_file_t:s0 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/volumes/kubernetes.io~configmap/nginx-conf/..2025_02_23_05_40_35.4114275528/nginx.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/22e96971 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/21c98286 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/0f1869e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/46889d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/5b6a5969 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/6c7921f5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4804f443 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/2a46b283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/a6b5573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4f88ee5b not reset as customized 
by admin to system_u:object_r:container_file_t:s0:c225,c458 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/5a4eee4b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/cd87c521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/38602af4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/1483b002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/0346718b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/d3ed4ada not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/3bb473a5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/8cd075a9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/00ab4760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/54a21c09 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c4,c24 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/70478888 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/43802770 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/955a0edc not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/bca2d009 not reset as customized by admin to system_u:object_r:container_file_t:s0:c140,c1009 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/b295f9bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..data not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c574,c582 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/bc46ea27 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5731fc1b not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5e1b2a3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/943f0936 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/3f764ee4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/8695e3f9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/aed7aa86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/c64d7448 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/0ba16bd2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/207a939f not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/54aa8cdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/1f5fa595 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/bf9c8153 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/47fba4ea not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c138,c778 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/7ae55ce9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7906a268 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/ce43fa69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7fc7ea3a not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/d8c38b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/9ef015fb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/b9db6a41 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/b1733d79 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/afccd338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/9df0a185 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/18938cf8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/7ab4eb23 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/56930be6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_35.630010865 not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/0d8e3722 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/d22b2e76 not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/e036759f not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/2734c483 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/57878fe7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/3f3c2e58 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/375bec3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/7bc41e08 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Sep 30 12:20:24 crc restorecon[4699]: 
/var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/48c7a72d not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/4b66701f not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/a5a1c202 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_40.1388695756 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/26f3df5b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/6d8fb21d not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/50e94777 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208473b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/ec9e08ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3b787c39 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208eaed5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/93aa3a2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3c697968 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/ba950ec9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/cb5cdb37 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/f2df9827 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 30 12:20:24 crc restorecon[4699]: 
/var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/fedaa673 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/9ca2df95 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/b2d7460e not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2207853c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/241c1c29 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2d910eaf not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..data not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c84,c419 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Sep 30 12:20:24 crc restorecon[4699]: 
/var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/c6c0f2e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/399edc97 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8049f7cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/0cec5484 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/312446d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c406,c828 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8e56a35d not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/2d30ddb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/eca8053d not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/c3a25c9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c168,c522 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/b9609c22 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c108,c511 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/e8b0eca9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/b36a9c3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/38af7b07 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/ae821620 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/baa23338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/2c534809 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/59b29eae not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/c91a8e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/4d87494a not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c442,c857 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/1e33ca63 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/8dea7be2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d0b04a99 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d84f01e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/4109059b not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/a7258a3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/05bdf2b6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/f3261b51 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/315d045e not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/5fdcf278 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/d053f757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/c2850dc7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:24 crc restorecon[4699]: 
/var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fcfb0b2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c7ac9b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fa0c0d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c609b6ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/2be6c296 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/89a32653 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/4eb9afeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/13af6efa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/b03f9724 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/e3d105cc not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c12,c18 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/3aed4d83 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/0765fa6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/2cefc627 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/3dcc6345 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/365af391 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Sep 30 12:20:24 crc restorecon[4699]: 
/var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b1130c0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/236a5913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b9432e26 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/5ddb0e3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/986dc4fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/8a23ff9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/9728ae68 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/665f31d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/136c9b42 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/98a1575b not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/cac69136 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/5deb77a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/2ae53400 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/e46f2326 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/dc688d3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/3497c3cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/177eb008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/af5a2afa not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/d780cb1f not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/49b0f374 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/26fbb125 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/cf14125a not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/b7f86972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/e51d739c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/88ba6a69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/669a9acf not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/5cd51231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/75349ec7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/15c26839 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/45023dcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/2bb66a50 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/64d03bdd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/ab8e7ca0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/bb9be25f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/9a0b61d3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/d471b9d2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/8cb76b8e not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/11a00840 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/ec355a92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/992f735e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d59cdbbc not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/72133ff0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/c56c834c not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d13724c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/0a498258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa471982 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fc900d92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa7d68da not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/4bacf9b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/424021b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/fc2e31a3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/f51eefac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/c8997f2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/7481f599 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/fdafea19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/d0e1c571 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/ee398915 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/682bb6b8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a3e67855 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a989f289 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/915431bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/7796fdab not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/dcdb5f19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/a3aaa88c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/5508e3e6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/160585de not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/e99f8da3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/8bc85570 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/a5861c91 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/84db1135 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/9e1a6043 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/c1aba1c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/d55ccd6d not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/971cc9f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/8f2e3dcf not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/ceb35e9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/1c192745 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/5209e501 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/f83de4df not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/e7b978ac not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/c64304a1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/5384386b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/cce3e3ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/8fb75465 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/740f573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/32fd1134 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/0a861bd3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/80363026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/bfa952a8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..2025_02_23_05_33_31.333075221 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/793bf43d not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/7db1bb6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/4f6a0368 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/c12c7d86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/36c4a773 not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/4c1e98ae not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/a4c8115c not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/setup/7db1802e not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver/a008a7ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-syncer/2c836bac not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-regeneration-controller/0ce62299 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-insecure-readyz/945d2457 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-check-endpoints/7d5c1dd8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/index.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:24 crc restorecon[4699]:
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/bundle-v1.15.0.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/channel.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/package.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:24 crc restorecon[4699]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:24 crc restorecon[4699]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:24 crc restorecon[4699]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:24 crc restorecon[4699]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:24 crc 
restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/bc8d0691 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/6b76097a not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/34d1af30 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/312ba61c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:24 crc 
restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/645d5dd1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/16e825f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/4cf51fc9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/2a23d348 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/075dbd49 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Sep 30 12:20:24 
crc restorecon[4699]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/dd585ddd not reset as customized by admin to system_u:object_r:container_file_t:s0:c377,c642 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/17ebd0ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c343 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/005579f4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_23_11.1287037894 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Sep 30 
12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/bf5f3b9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/af276eb7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/ea28e322 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/692e6683 not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/871746a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/4eb2e958 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Sep 30 12:20:24 crc restorecon[4699]: 
/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/etc-hosts not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/ca9b62da not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/0edd6fce not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca not reset as customized by 
admin to system_u:object_r:container_file_t:s0:c14,c22 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Sep 30 12:20:24 crc restorecon[4699]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/containers/controller-manager/89b4555f not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Sep 30 12:20:25 crc restorecon[4699]: 
/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/655fcd71 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/0d43c002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/e68efd17 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/9acf9b65 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/5ae3ff11 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/1e59206a not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/27af16d1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c304,c1017 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/7918e729 not reset as customized by admin to system_u:object_r:container_file_t:s0:c853,c893 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/5d976d0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c585,c981 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Sep 30 12:20:25 crc restorecon[4699]: 
/var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/d7f55cbb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/f0812073 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/1a56cbeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/7fdd437e not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/cdfb5652 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 30 12:20:25 crc restorecon[4699]: 
/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc..5000 not reset 
as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/fix-audit-permissions/fb93119e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 30 12:20:25 crc restorecon[4699]: 
/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver/f1e8fc0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver-check-endpoints/218511f3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server/serving-certs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/ca8af7b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/72cc8a75 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/6e8a3760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4c3455c0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c5,c6 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/2278acb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4b453e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/3ec09bda not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 12:20:25 crc restorecon[4699]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2/cacerts.bin not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java/cacerts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl/ca-bundle.trust.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/tls-ca-bundle.pem not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/email-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/objsign-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2ae6433e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fde84897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75680d2e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/openshift-service-serving-signer_1740288168.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/facfc4fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f5a969c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CFCA_EV_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9ef4a08a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ingress-operator_1740288202.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2f332aed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 12:20:25 crc restorecon[4699]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/248c8271.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d10a21f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ACCVRAIZ1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a94d09e5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c9a4d3b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40193066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd8c0d63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b936d1c6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CA_Disig_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4fd49c6c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM_SERVIDORES_SEGUROS.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b81b93f0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 12:20:25 crc restorecon[4699]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f9a69fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b30d5fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ANF_Secure_Server_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b433981b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93851c9e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9282e51c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7dd1bc4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Actalis_Authentication_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/930ac5d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f47b495.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e113c810.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5931b5bc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 12:20:25 crc restorecon[4699]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Commercial.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2b349938.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e48193cf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/302904dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a716d4ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Networking.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93bc0acc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/86212b19.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b727005e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbc54cab.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f51bb24c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 12:20:25 crc restorecon[4699]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c28a8a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9c8dfbd4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ccc52f49.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cb1c3204.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ce5e74ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd08c599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6d41d539.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb5fa911.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e35234b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 12:20:25 crc restorecon[4699]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8cb5ee0f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a7c655d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f8fc53da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/de6d66f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d41b5e2a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/41a3f684.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1df5a75f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_2011.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e36a6752.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b872f2b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9576d26b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/228f89db.0 
not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_ECC_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb717492.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d21b73c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b1b94ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/595e996b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_RSA_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b46e03d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/128f4b91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_3_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81f2d2b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Autoridad_de_Certificacion_Firmaprofesional_CIF_A62634068.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3bde41ac.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d16a5865.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_EC-384_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0179095f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ffa7f1eb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9482e63a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4dae3dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e359ba6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7e067d03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/95aff9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7746a63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Baltimore_CyberTrust_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/653b494a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 12:20:25 crc restorecon[4699]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3ad48a91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_2_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/54657681.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/82223c44.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8de2f56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d9dafe4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d96b65e2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee64a828.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40547a79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5a3f0ff8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a780d93.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 12:20:25 crc restorecon[4699]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/34d996fb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/eed8c118.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/89c02a45.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b1159c4c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d6325660.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4c339cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8312c4c1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_E1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8508e720.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5fdd185d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 12:20:25 crc restorecon[4699]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48bec511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/69105f4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b9bc432.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/32888f65.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b03dec0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/219d9499.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5acf816d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbf06781.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc99f41e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AAA_Certificate_Services.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/985c1f52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8794b4e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_BR_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7c037b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ef954a4e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_EV_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2add47b6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/90c5a3c8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0f3e76e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/53a1b57a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_EV_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5ad8a5d6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/68dd7389.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d04f354.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d6437c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/062cdee6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bd43e1dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7f3d5d1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c491639e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3513523f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/399e7759.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/feffd413.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d18e9066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/607986c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c90bc37d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1b0f7e5c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e08bfd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dd8e9d41.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed39abd0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a3418fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bc3f2570.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_High_Assurance_EV_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/244b5494.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81b9768f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4be590e0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_ECC_P384_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9846683b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/252252d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e8e7201.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_RSA4096_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d52c538d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c44cc0c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Trusted_Root_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75d1b2ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a2c66da8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ecccd8db.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust.net_Certification_Authority__2048_.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/aee5f10d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e7271e8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0e59380.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4c3982f2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b99d060.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf64f35b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0a775a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/002c0b4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cc450945.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_EC1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/106f3e4d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b3fb433b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4042bcee.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/02265526.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/455f1b52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0d69c7e1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9f727ac7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5e98733a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0cd152c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc4d6a89.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6187b673.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/FIRMAPROFESIONAL_CA_ROOT-A_WEB.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ba8887ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/068570d1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f081611a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48a195d8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GDCA_TrustAUTH_R5_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f6fa695.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab59055e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b92fd57f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GLOBALTRUST_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fa5da96b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ec40989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7719f463.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1001acf7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f013ecaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/626dceaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c559d742.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1d3472b9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9479c8c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a81e292b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4bfab552.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e071171e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/57bcb2da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_ECC_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab5346f4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5046c355.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_RSA_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/865fbdf9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da0cfd1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/85cde254.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_ECC_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbb3f32b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureSign_RootCA11.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5860aaa6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/31188b5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HiPKI_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c7f1359b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f15c80c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hongkong_Post_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/09789157.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/18856ac4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e09d511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Commercial_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cf701eeb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d06393bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Public_Sector_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/10531352.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Izenpe.com.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureTrust_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0ed035a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsec_e-Szigno_Root_CA_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8160b96c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8651083.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2c63f966.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_ECC_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d89cda1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/01419da9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_RSA_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7a5b843.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_RSA_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf53fb88.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9591a472.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3afde786.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Gold_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NAVER_Global_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3fb36b73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d39b0a2c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a89d74c2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd58d51e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7db1890.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NetLock_Arany__Class_Gold__F__tan__s__tv__ny.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/988a38cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/60afe812.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f39fc864.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5443e9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GB_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e73d606e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dfc0fe80.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b66938e9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e1eab7c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GC_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/773e07ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c899c73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d59297b8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ddcda989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_1_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/749e9e03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/52b525c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7e8dc79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a819ef2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/08063a00.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b483515.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/064e0aa9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1f58a078.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6f7454b3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7fa05551.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76faf6c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9339512a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f387163d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee37c333.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e18bfb83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e442e424.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fe8a2cd8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/23f4c490.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5cd81ad7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0c70a8d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7892ad52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SZAFIR_ROOT_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4f316efb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_RSA_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/06dc52d5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/583d0756.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0bf05006.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/88950faa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9046744a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c860d51.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_RSA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6fa5da56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/33ee480d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Secure_Global_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/63a2c897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_ECC_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bdacca6f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ff34af3f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbff3a01.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_ECC_RootCA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_C1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/406c9bb1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_C3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Services_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Silver_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/99e1b953.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/14bc7599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TUBITAK_Kamu_SM_SSL_Kok_Sertifikasi_-_Surum_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a3adc42.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f459871d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_ECC_Root_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_RSA_Root_2023.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TeliaSonera_Root_CA_v1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telia_Root_CA_v2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f103249.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f058632f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-certificates.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9bf03295.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/98aaf404.0 not reset as customized by admin to
system_u:object_r:container_file_t:s0:c10,c16 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1cef98f5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/073bfcc5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2923b3f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f249de83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/edcbddb5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P256_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b5697b0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ae85e5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b74d2bd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 
12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P384_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d887a5bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9aef356c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TunTrust_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd64f3fc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e13665f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Extended_Validation_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f5dc4f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da7377f6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Global_G2_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c01eb047.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/304d27c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed858448.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 12:20:25 crc restorecon[4699]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f30dd6ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/04f60c28.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_ECC_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fc5a8f99.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/35105088.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee532fd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/XRamp_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/706f604c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76579174.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d86cdd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 12:20:25 crc restorecon[4699]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/882de061.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f618aec.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a9d40e02.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e-Szigno_Root_CA_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e868b802.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/83e9984f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ePKI_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca6e4ad9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d6523ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4b718d9b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/869fbf79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/containers/registry/f8d22bdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 12:20:25 crc 
restorecon[4699]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/6e8bbfac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/54dd7996 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/a4f1bb05 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/207129da not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/c1df39e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/15b8f1cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 30 12:20:25 crc restorecon[4699]: 
/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/77bd6913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/2382c1b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/704ce128 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/70d16fe0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/bfb95535 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/57a8e8e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/1b9d3e5e not reset as customized by admin to system_u:object_r:container_file_t:s0:c107,c917 Sep 30 12:20:25 crc restorecon[4699]: 
/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/fddb173c not reset as customized by admin to system_u:object_r:container_file_t:s0:c202,c983 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/95d3c6c4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/bfb5fff5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/2aef40aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/c0391cad not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/1119e69d not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/660608b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/8220bd53 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/85f99d5c not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/4b0225f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/9c2a3394 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/e820b243 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/1ca52ea0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/e6988e45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Sep 30 12:20:25 crc restorecon[4699]: 
/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/6655f00b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/98bc3986 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/08e3458a not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/2a191cb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/6c4eeefb not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/f61a549c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/24891863 not reset as customized by admin to system_u:object_r:container_file_t:s0:c37,c572 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/fbdfd89c not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/9b63b3bc not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c37,c572 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/8acde6d6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/node-driver-registrar/59ecbba3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/csi-provisioner/685d4be3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Sep 30 12:20:25 crc restorecon[4699]: 
/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/containers/route-controller-manager/feaea55e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 
12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg/catalog.json not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/63709497 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/d966b7fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/f5773757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/81c9edb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/57bf57ee not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/86f5e6aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/0aabe31d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/d2af85c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/09d157d9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller not reset 
as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller/catalog.json not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator/catalog.json not reset as
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c0fe7256 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c30319e4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/e6b1dd45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/2bb643f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/920de426 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/70fa1e87 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/a1c12a2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/9442e6c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/5b45ec72 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/3c9f3a59 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/1091c11b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/9a6821c6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/ec0c35e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/517f37e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/6214fe78 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/ba189c8b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/351e4f31 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/c0f219ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/8069f607 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/559c3d82 not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/605ad488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/148df488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/3bf6dcb4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/022a2feb not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/938c3924 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/729fe23e not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/1fd5cbd4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/a96697e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/e155ddca not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/10dd0e0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/6f2c8392 not reset as customized by admin to system_u:object_r:container_file_t:s0:c267,c588
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/bd241ad9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/plugins not reset as customized by admin to system_u:object_r:container_file_t:s0
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/plugins/csi-hostpath not reset as customized by admin to system_u:object_r:container_file_t:s0
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/plugins/csi-hostpath/csi.sock not reset as customized by admin to system_u:object_r:container_file_t:s0
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/plugins/kubernetes.io not reset as customized by admin to system_u:object_r:container_file_t:s0
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/plugins/kubernetes.io/csi not reset as customized by admin to system_u:object_r:container_file_t:s0
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983 not reset as customized by admin to system_u:object_r:container_file_t:s0
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount not reset as customized by admin to system_u:object_r:container_file_t:s0
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/vol_data.json not reset as customized by admin to system_u:object_r:container_file_t:s0
Sep 30 12:20:25 crc restorecon[4699]: /var/lib/kubelet/plugins_registry not reset as customized by admin to system_u:object_r:container_file_t:s0
Sep 30 12:20:25 crc restorecon[4699]: Relabeled /var/usrlocal/bin/kubenswrapper from system_u:object_r:bin_t:s0 to system_u:object_r:kubelet_exec_t:s0
Sep 30 12:20:26 crc kubenswrapper[5002]: Flag --container-runtime-endpoint has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information.
Sep 30 12:20:26 crc kubenswrapper[5002]: Flag --minimum-container-ttl-duration has been deprecated, Use --eviction-hard or --eviction-soft instead. Will be removed in a future version.
Sep 30 12:20:26 crc kubenswrapper[5002]: Flag --volume-plugin-dir has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information.
Sep 30 12:20:26 crc kubenswrapper[5002]: Flag --register-with-taints has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information.
Sep 30 12:20:26 crc kubenswrapper[5002]: Flag --pod-infra-container-image has been deprecated, will be removed in a future release. Image garbage collector will get sandbox image information from CRI.
Sep 30 12:20:26 crc kubenswrapper[5002]: Flag --system-reserved has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information.
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.409963 5002 server.go:211] "--pod-infra-container-image will not be pruned by the image garbage collector in kubelet and should also be set in the remote runtime"
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.417667 5002 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.417699 5002 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.417711 5002 feature_gate.go:330] unrecognized feature gate: OVNObservability
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.417721 5002 feature_gate.go:330] unrecognized feature gate: Example
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.417730 5002 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.417740 5002 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.417748 5002 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.417758 5002 feature_gate.go:330] unrecognized feature gate: SignatureStores
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.417767 5002 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.417775 5002 feature_gate.go:330] unrecognized feature gate: DNSNameResolver
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.417783 5002 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.417791 5002 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.417799 5002 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.417807 5002 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.417815 5002 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.417823 5002 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.417830 5002 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.417840 5002 feature_gate.go:330] unrecognized feature gate: OnClusterBuild
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.417848 5002 feature_gate.go:330] unrecognized feature gate: InsightsConfig
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.417856 5002 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.417864 5002 feature_gate.go:330] unrecognized feature gate: UpgradeStatus
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.417871 5002 feature_gate.go:330] unrecognized feature gate: HardwareSpeed
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.417879 5002 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.417887 5002 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.417895 5002 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.417903 5002 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.417910 5002 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.417918 5002 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.417926 5002 feature_gate.go:330] unrecognized feature gate: NewOLM
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.417934 5002 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.417941 5002 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.417949 5002 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.417957 5002 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.417964 5002 feature_gate.go:330] unrecognized feature gate: PinnedImages
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.417977 5002 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release.
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.417987 5002 feature_gate.go:330] unrecognized feature gate: ManagedBootImages
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.417996 5002 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.418005 5002 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.418014 5002 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.418023 5002 feature_gate.go:330] unrecognized feature gate: GatewayAPI
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.418032 5002 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.418041 5002 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.418049 5002 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.418057 5002 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.418065 5002 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.418073 5002 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.418081 5002 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.418088 5002 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.418096 5002 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.418103 5002 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.418111 5002 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.418119 5002 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.418127 5002 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.418134 5002 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.418141 5002 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.418149 5002 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.418156 5002 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.418164 5002 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.418171 5002 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.418178 5002 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.418186 5002 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.418197 5002 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release.
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.418207 5002 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.418216 5002 feature_gate.go:330] unrecognized feature gate: ExternalOIDC
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.418226 5002 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release.
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.418236 5002 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.418244 5002 feature_gate.go:330] unrecognized feature gate: PlatformOperators
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.418253 5002 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.418293 5002 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.418303 5002 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release.
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.418314 5002 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.418468 5002 flags.go:64] FLAG: --address="0.0.0.0"
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.418512 5002 flags.go:64] FLAG: --allowed-unsafe-sysctls="[]"
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.418528 5002 flags.go:64] FLAG: --anonymous-auth="true"
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.418540 5002 flags.go:64] FLAG: --application-metrics-count-limit="100"
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.418551 5002 flags.go:64] FLAG: --authentication-token-webhook="false"
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.418560 5002 flags.go:64] FLAG: --authentication-token-webhook-cache-ttl="2m0s"
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.418572 5002 flags.go:64] FLAG: --authorization-mode="AlwaysAllow"
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.418583 5002 flags.go:64] FLAG: --authorization-webhook-cache-authorized-ttl="5m0s"
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.418592 5002 flags.go:64] FLAG: --authorization-webhook-cache-unauthorized-ttl="30s"
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.418601 5002 flags.go:64] FLAG: --boot-id-file="/proc/sys/kernel/random/boot_id"
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.418611 5002 flags.go:64] FLAG: --bootstrap-kubeconfig="/etc/kubernetes/kubeconfig"
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.418621 5002 flags.go:64] FLAG: --cert-dir="/var/lib/kubelet/pki"
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.418629 5002 flags.go:64] FLAG: --cgroup-driver="cgroupfs"
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.418638 5002 flags.go:64] FLAG: --cgroup-root=""
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.418647 5002 flags.go:64] FLAG: --cgroups-per-qos="true"
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.418657 5002 flags.go:64] FLAG: --client-ca-file=""
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.418665 5002 flags.go:64] FLAG: --cloud-config=""
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.418675 5002 flags.go:64] FLAG: --cloud-provider=""
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.418683 5002 flags.go:64] FLAG: --cluster-dns="[]"
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.418694 5002 flags.go:64] FLAG: --cluster-domain=""
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.418703 5002 flags.go:64] FLAG: --config="/etc/kubernetes/kubelet.conf"
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.418712 5002 flags.go:64] FLAG: --config-dir=""
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.418721 5002 flags.go:64] FLAG: --container-hints="/etc/cadvisor/container_hints.json"
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.418733 5002 flags.go:64] FLAG: --container-log-max-files="5"
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.418754 5002 flags.go:64] FLAG: --container-log-max-size="10Mi"
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.418765 5002 flags.go:64] FLAG: --container-runtime-endpoint="/var/run/crio/crio.sock"
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.418774 5002 flags.go:64] FLAG: --containerd="/run/containerd/containerd.sock"
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.418784 5002 flags.go:64] FLAG: --containerd-namespace="k8s.io"
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.418793 5002 flags.go:64] FLAG: --contention-profiling="false"
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.418801 5002 flags.go:64] FLAG: --cpu-cfs-quota="true"
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.418810 5002 flags.go:64] FLAG: --cpu-cfs-quota-period="100ms"
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.418820 5002 flags.go:64] FLAG: --cpu-manager-policy="none"
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.418830 5002 flags.go:64] FLAG: --cpu-manager-policy-options=""
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.418841 5002 flags.go:64] FLAG: --cpu-manager-reconcile-period="10s"
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.418850 5002 flags.go:64] FLAG: --enable-controller-attach-detach="true"
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.418858 5002 flags.go:64] FLAG: --enable-debugging-handlers="true"
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.418868 5002 flags.go:64] FLAG: --enable-load-reader="false"
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.418877 5002 flags.go:64] FLAG: --enable-server="true"
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.418885 5002 flags.go:64] FLAG: --enforce-node-allocatable="[pods]"
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.418897 5002 flags.go:64] FLAG: --event-burst="100"
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.418907 5002 flags.go:64] FLAG: --event-qps="50"
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.418916 5002 flags.go:64] FLAG: --event-storage-age-limit="default=0"
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.418925 5002 flags.go:64] FLAG: --event-storage-event-limit="default=0"
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.418934 5002 flags.go:64] FLAG: --eviction-hard=""
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.418944 5002 flags.go:64] FLAG: --eviction-max-pod-grace-period="0"
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.418953 5002 flags.go:64] FLAG: --eviction-minimum-reclaim=""
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.418961 5002 flags.go:64] FLAG: --eviction-pressure-transition-period="5m0s"
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.418971 5002 flags.go:64] FLAG: --eviction-soft=""
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.418980 5002 flags.go:64] FLAG: --eviction-soft-grace-period=""
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.418989 5002 flags.go:64] FLAG: --exit-on-lock-contention="false"
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.418998 5002 flags.go:64] FLAG: --experimental-allocatable-ignore-eviction="false"
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.419006 5002 flags.go:64] FLAG: --experimental-mounter-path=""
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.419015 5002 flags.go:64] FLAG: --fail-cgroupv1="false"
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.419024 5002 flags.go:64] FLAG: --fail-swap-on="true"
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.419033 5002 flags.go:64] FLAG: --feature-gates=""
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.419044 5002 flags.go:64] FLAG: --file-check-frequency="20s"
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.419056 5002 flags.go:64] FLAG: --global-housekeeping-interval="1m0s"
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.419065 5002 flags.go:64] FLAG: --hairpin-mode="promiscuous-bridge"
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.419076 5002 flags.go:64] FLAG: --healthz-bind-address="127.0.0.1"
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.419087 5002 flags.go:64] FLAG: --healthz-port="10248"
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.419096 5002 flags.go:64] FLAG: --help="false"
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.419105 5002 flags.go:64] FLAG: --hostname-override=""
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.419113 5002 flags.go:64] FLAG: --housekeeping-interval="10s"
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.419123 5002 flags.go:64] FLAG: --http-check-frequency="20s"
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.419155 5002 flags.go:64] FLAG: --image-credential-provider-bin-dir=""
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.419166 5002 flags.go:64] FLAG: --image-credential-provider-config=""
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.419175 5002 flags.go:64] FLAG: --image-gc-high-threshold="85"
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.419185 5002 flags.go:64] FLAG: --image-gc-low-threshold="80"
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.419195 5002 flags.go:64] FLAG: --image-service-endpoint=""
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.419204 5002 flags.go:64] FLAG: --kernel-memcg-notification="false"
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.419213 5002 flags.go:64] FLAG: --kube-api-burst="100"
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.419222 5002 flags.go:64] FLAG: --kube-api-content-type="application/vnd.kubernetes.protobuf"
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.419231 5002 flags.go:64] FLAG: --kube-api-qps="50"
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.419241 5002 flags.go:64] FLAG: --kube-reserved=""
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.419250 5002 flags.go:64] FLAG: --kube-reserved-cgroup=""
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.419259 5002 flags.go:64] FLAG: --kubeconfig="/var/lib/kubelet/kubeconfig"
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.419268 5002 flags.go:64] FLAG: --kubelet-cgroups=""
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.419276 5002 flags.go:64] FLAG: --local-storage-capacity-isolation="true"
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.419285 5002 flags.go:64] FLAG: --lock-file=""
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.419294 5002 flags.go:64] FLAG: --log-cadvisor-usage="false"
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.419304 5002 flags.go:64] FLAG: --log-flush-frequency="5s"
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.419313 5002 flags.go:64] FLAG: --log-json-info-buffer-size="0"
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.419325 5002 flags.go:64] FLAG: --log-json-split-stream="false"
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.419335 5002 flags.go:64] FLAG: --log-text-info-buffer-size="0"
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.419344 5002 flags.go:64] FLAG: --log-text-split-stream="false"
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.419353 5002 flags.go:64] FLAG: --logging-format="text"
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.419361 5002 flags.go:64] FLAG: --machine-id-file="/etc/machine-id,/var/lib/dbus/machine-id"
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.419371 5002 flags.go:64] FLAG: --make-iptables-util-chains="true"
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.419388 5002 flags.go:64] FLAG: --manifest-url=""
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.419397 5002 flags.go:64] FLAG: --manifest-url-header=""
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.419409 5002 flags.go:64] FLAG: --max-housekeeping-interval="15s"
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.419419 5002 flags.go:64] FLAG: --max-open-files="1000000"
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.419430 5002 flags.go:64] FLAG: --max-pods="110"
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.419439 5002 flags.go:64] FLAG: --maximum-dead-containers="-1"
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.419449 5002 flags.go:64] FLAG: --maximum-dead-containers-per-container="1"
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.419458 5002 flags.go:64] FLAG: --memory-manager-policy="None"
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.419466 5002 flags.go:64] FLAG: --minimum-container-ttl-duration="6m0s"
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.419502 5002 flags.go:64] FLAG: --minimum-image-ttl-duration="2m0s"
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.419512 5002 flags.go:64] FLAG: --node-ip="192.168.126.11"
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.419522 5002 flags.go:64] FLAG: --node-labels="node-role.kubernetes.io/control-plane=,node-role.kubernetes.io/master=,node.openshift.io/os_id=rhcos"
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.419541 5002 flags.go:64] FLAG: --node-status-max-images="50"
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.419550 5002 flags.go:64] FLAG: --node-status-update-frequency="10s"
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.419559 5002 flags.go:64] FLAG: --oom-score-adj="-999"
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.419568 5002 flags.go:64] FLAG: --pod-cidr=""
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.419579 5002 flags.go:64] FLAG: --pod-infra-container-image="quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:33549946e22a9ffa738fd94b1345f90921bc8f92fa6137784cb33c77ad806f9d"
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.419591 5002 flags.go:64] FLAG: --pod-manifest-path=""
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.419600 5002 flags.go:64] FLAG: --pod-max-pids="-1"
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.419609 5002 flags.go:64] FLAG: --pods-per-core="0"
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.419618 5002 flags.go:64] FLAG: --port="10250"
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.419627 5002 flags.go:64] FLAG: --protect-kernel-defaults="false"
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.419636 5002 flags.go:64] FLAG: --provider-id=""
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.419645 5002 flags.go:64] FLAG: --qos-reserved=""
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.419654 5002 flags.go:64] FLAG: --read-only-port="10255"
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.419662 5002 flags.go:64] FLAG: --register-node="true"
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.419671 5002 flags.go:64] FLAG: --register-schedulable="true"
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.419680 5002 flags.go:64] FLAG: --register-with-taints="node-role.kubernetes.io/master=:NoSchedule"
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.419699 5002 flags.go:64] FLAG: --registry-burst="10"
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.419707 5002 flags.go:64] FLAG: --registry-qps="5"
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.419716 5002 flags.go:64] FLAG: --reserved-cpus=""
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.419725 5002 flags.go:64] FLAG: --reserved-memory=""
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.419737 5002 flags.go:64] FLAG: --resolv-conf="/etc/resolv.conf"
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.419746 5002 flags.go:64] FLAG: --root-dir="/var/lib/kubelet"
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.419755 5002 flags.go:64] FLAG: --rotate-certificates="false"
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.419764 5002 flags.go:64] FLAG: --rotate-server-certificates="false"
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.419773 5002 flags.go:64] FLAG: --runonce="false"
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.419782 5002 flags.go:64] FLAG: --runtime-cgroups="/system.slice/crio.service"
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.419791 5002 flags.go:64] FLAG: --runtime-request-timeout="2m0s"
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.419800 5002 flags.go:64] FLAG: --seccomp-default="false"
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.419808 5002 flags.go:64] FLAG: --serialize-image-pulls="true"
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.419817 5002 flags.go:64] FLAG: --storage-driver-buffer-duration="1m0s"
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.419827 5002 flags.go:64] FLAG: --storage-driver-db="cadvisor"
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.419835 5002 flags.go:64] FLAG: --storage-driver-host="localhost:8086"
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.419844 5002 flags.go:64] FLAG: --storage-driver-password="root"
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.419853 5002 flags.go:64] FLAG: --storage-driver-secure="false"
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.419862 5002 flags.go:64] FLAG: --storage-driver-table="stats"
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.419871 5002 flags.go:64] FLAG: --storage-driver-user="root"
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.419880 5002 flags.go:64] FLAG: --streaming-connection-idle-timeout="4h0m0s"
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.419889 5002 flags.go:64] FLAG: --sync-frequency="1m0s"
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.419898 5002 flags.go:64] FLAG: --system-cgroups=""
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.419907 5002 flags.go:64] FLAG: --system-reserved="cpu=200m,ephemeral-storage=350Mi,memory=350Mi"
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.419921 5002 flags.go:64] FLAG: --system-reserved-cgroup=""
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.419930 5002 flags.go:64] FLAG: --tls-cert-file=""
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.419939 5002 flags.go:64] FLAG: --tls-cipher-suites="[]"
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.419949 5002 flags.go:64] FLAG: --tls-min-version=""
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.419958 5002 flags.go:64] FLAG: --tls-private-key-file=""
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.419967 5002 flags.go:64] FLAG: --topology-manager-policy="none"
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.419976 5002 flags.go:64] FLAG: --topology-manager-policy-options=""
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.419985 5002 flags.go:64] FLAG: --topology-manager-scope="container"
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.419994 5002 flags.go:64] FLAG: --v="2"
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.420005 5002 flags.go:64] FLAG: --version="false"
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.420016 5002 flags.go:64] FLAG: --vmodule=""
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.420026 5002 flags.go:64] FLAG: --volume-plugin-dir="/etc/kubernetes/kubelet-plugins/volume/exec"
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.420036 5002 flags.go:64] FLAG: --volume-stats-agg-period="1m0s"
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.420237 5002 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.420247 5002 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.420256 5002 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.420265 5002 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.420273 5002 feature_gate.go:330] unrecognized feature gate: NewOLM
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.420282 5002 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.420290 5002 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.420298 5002 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.420306 5002 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.420314 5002 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.420322 5002 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.420330 5002 feature_gate.go:330] unrecognized feature gate: HardwareSpeed
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.420337 5002 feature_gate.go:330] unrecognized feature gate: ExternalOIDC
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.420345 5002 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.420353 5002 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.420361 5002 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.420368 5002 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.420376 5002 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.420384 5002 feature_gate.go:330] unrecognized feature gate: UpgradeStatus
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.420392 5002 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.420399 5002 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.420410 5002 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release.
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.420420 5002 feature_gate.go:330] unrecognized feature gate: Example
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.420435 5002 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.420445 5002 feature_gate.go:330] unrecognized feature gate: ManagedBootImages
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.420454 5002 feature_gate.go:330] unrecognized feature gate: OnClusterBuild
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.420463 5002 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.420496 5002 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.420504 5002 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.420512 5002 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.420521 5002 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.420529 5002 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.420537 5002 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.420544 5002 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.420552 5002 feature_gate.go:330] unrecognized feature gate: GatewayAPI
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.420560 5002 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.420570 5002 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release.
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.420579 5002 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.420589 5002 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release.
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.420600 5002 feature_gate.go:330] unrecognized feature gate: PinnedImages
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.420610 5002 feature_gate.go:330] unrecognized feature gate: SignatureStores
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.420618 5002 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.420629 5002 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.420639 5002 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.420647 5002 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.420657 5002 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.420665 5002 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.420673 5002 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.420681 5002 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.420689 5002 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.420698 5002 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.420706 5002 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.420714 5002 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.420721 5002 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.420729 5002 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.420737 5002 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.420745 5002 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.420755 5002 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release.
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.420764 5002 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.420773 5002 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.420781 5002 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.420789 5002 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.420797 5002 feature_gate.go:330] unrecognized feature gate: OVNObservability
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.420805 5002 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.420812 5002 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.420821 5002 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.420828 5002 feature_gate.go:330] unrecognized feature gate: DNSNameResolver
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.420836 5002 feature_gate.go:330] unrecognized feature gate: InsightsConfig
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.420843 5002 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.420851 5002 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.420858 5002 feature_gate.go:330] unrecognized feature gate: PlatformOperators
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.420884 5002 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]}
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.433334 5002 server.go:491] "Kubelet version" kubeletVersion="v1.31.5"
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.433373 5002 server.go:493] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK=""
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.433542 5002 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.433560 5002 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.433573 5002 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.433586 5002 feature_gate.go:330] unrecognized feature gate: GatewayAPI
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.433596 5002 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.433606 5002 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.433616 5002 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.433626 5002 feature_gate.go:330] unrecognized feature gate: PlatformOperators
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.433637 5002 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.433647 5002 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.433656 5002 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.433670 5002 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release.
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.433685 5002 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.433696 5002 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.433710 5002 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.433719 5002 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.433727 5002 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.433734 5002 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.433743 5002 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.433751 5002 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.433759 5002 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.433767 5002 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.433774 5002 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.433782 5002 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.433790 5002 feature_gate.go:330] unrecognized feature gate: OVNObservability
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.433798 5002 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.433805 5002 feature_gate.go:330] unrecognized feature gate: InsightsConfig
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.433813 5002 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.433822 5002 feature_gate.go:330] unrecognized feature gate: ManagedBootImages
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.433833 5002 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release.
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.433843 5002 feature_gate.go:330] unrecognized feature gate: SignatureStores
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.433852 5002 feature_gate.go:330] unrecognized feature gate: PinnedImages
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.433860 5002 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.433870 5002 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release.
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.433880 5002 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.433889 5002 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.433898 5002 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.433907 5002 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.433915 5002 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.433924 5002 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.433934 5002 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.433943 5002 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.433951 5002 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.433959 5002 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.433967 5002 feature_gate.go:330] unrecognized feature gate: ExternalOIDC
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.433975 5002 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.433983 5002 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.433990 5002 feature_gate.go:330] unrecognized feature gate: DNSNameResolver
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.433998 5002 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.434005 5002 feature_gate.go:330] unrecognized feature gate: UpgradeStatus
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.434014 5002 feature_gate.go:330] unrecognized feature gate: NewOLM
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.434022 5002 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.434029 5002 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.434037 5002 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.434044 5002 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.434052 5002 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.434060 5002 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.434068 5002 feature_gate.go:330] unrecognized feature gate: Example
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.434077 5002 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.434085 5002 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.434092 5002 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.434100 5002 feature_gate.go:330] unrecognized feature gate: OnClusterBuild
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.434108 5002 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.434115 5002 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.434123 5002 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.434130 5002 feature_gate.go:330] unrecognized feature gate: HardwareSpeed
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.434138 5002 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.434146 5002 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.434156 5002 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release.
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.434165 5002 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.434173 5002 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.434186 5002 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]}
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.434452 5002 feature_gate.go:330] unrecognized feature gate: PinnedImages
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.434467 5002 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.434503 5002 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.434512 5002 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.434522 5002 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.434530 5002 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.434538 5002 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.434546 5002 feature_gate.go:330] unrecognized feature gate: SignatureStores
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.434553 5002 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.434561 5002 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.434568 5002 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.434576 5002 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.434584 5002 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.434591 5002 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.434602 5002 feature_gate.go:330] unrecognized feature gate: NewOLM
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.434612 5002 feature_gate.go:330] unrecognized feature gate: GatewayAPI
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.434622 5002 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.434632 5002 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.434642 5002 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.434655 5002 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release.
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.434669 5002 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.434681 5002 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.434691 5002 feature_gate.go:330] unrecognized feature gate: InsightsConfig
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.434703 5002 feature_gate.go:330] unrecognized feature gate: OVNObservability
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.434712 5002 feature_gate.go:330] unrecognized feature gate: ExternalOIDC
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.434722 5002 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.434732 5002 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.434741 5002 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.434751 5002 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.434761 5002 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.434770 5002 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.434780 5002 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.434790 5002 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.434799 5002 feature_gate.go:330] unrecognized feature gate: HardwareSpeed
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.434813 5002 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release.
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.434826 5002 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.434837 5002 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.434847 5002 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.434857 5002 feature_gate.go:330] unrecognized feature gate: OnClusterBuild
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.434867 5002 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.434878 5002 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.434888 5002 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.434899 5002 feature_gate.go:330] unrecognized feature gate: UpgradeStatus
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.434908 5002 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.434918 5002 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.434929 5002 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.434939 5002 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.434949 5002 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.434958 5002 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.434972 5002 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release.
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.434985 5002 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.434995 5002 feature_gate.go:330] unrecognized feature gate: Example
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.435004 5002 feature_gate.go:330] unrecognized feature gate: PlatformOperators
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.435014 5002 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.435023 5002 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.435033 5002 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.435042 5002 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.435052 5002 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.435061 5002 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.435071 5002 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.435080 5002 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.435090 5002 feature_gate.go:330] unrecognized feature gate: DNSNameResolver
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.435101 5002 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.435110 5002 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.435120 5002 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.435133 5002 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release.
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.435146 5002 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.435158 5002 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.435169 5002 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.435178 5002 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.435186 5002 feature_gate.go:330] unrecognized feature gate: ManagedBootImages
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.435199 5002 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]}
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.435449 5002 server.go:940] "Client rotation is on, will bootstrap in background"
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.442783 5002 bootstrap.go:85] "Current kubeconfig file contents are still valid, no bootstrap necessary"
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.442929 5002 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-client-current.pem".
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.444957 5002 server.go:997] "Starting client certificate rotation"
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.445018 5002 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate rotation is enabled
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.445309 5002 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate expiration is 2026-02-24 05:52:08 +0000 UTC, rotation deadline is 2025-12-20 18:46:30.082902749 +0000 UTC
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.445432 5002 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Waiting 1950h26m3.637478126s for next certificate rotation
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.479846 5002 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt"
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.484757 5002 dynamic_cafile_content.go:161] "Starting controller" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt"
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.505907 5002 log.go:25] "Validated CRI v1 runtime API"
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.556643 5002 log.go:25] "Validated CRI v1 image API"
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.559557 5002 server.go:1437] "Using cgroup driver setting received from the CRI runtime" cgroupDriver="systemd"
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.566807 5002 fs.go:133] Filesystem UUIDs: map[0b076daa-c26a-46d2-b3a6-72a8dbc6e257:/dev/vda4 2025-09-30-12-15-18-00:/dev/sr0 7B77-95E7:/dev/vda2 de0497b0-db1b-465a-b278-03db02455c71:/dev/vda3]
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.566845 5002 fs.go:134] Filesystem partitions: map[/dev/shm:{mountpoint:/dev/shm major:0 minor:22 fsType:tmpfs blockSize:0} /dev/vda3:{mountpoint:/boot major:252 minor:3 fsType:ext4 blockSize:0} /dev/vda4:{mountpoint:/var major:252 minor:4 fsType:xfs blockSize:0} /run:{mountpoint:/run major:0 minor:24 fsType:tmpfs blockSize:0} /run/user/1000:{mountpoint:/run/user/1000 major:0 minor:42 fsType:tmpfs blockSize:0} /tmp:{mountpoint:/tmp major:0 minor:30 fsType:tmpfs blockSize:0} /var/lib/etcd:{mountpoint:/var/lib/etcd major:0 minor:43 fsType:tmpfs blockSize:0}]
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.582914 5002 manager.go:217] Machine: {Timestamp:2025-09-30 12:20:26.58089832 +0000 UTC m=+0.830580506 CPUVendorID:AuthenticAMD NumCores:12 NumPhysicalCores:1 NumSockets:12 CpuFrequency:2799998 MemoryCapacity:33654124544 SwapCapacity:0 MemoryByType:map[] NVMInfo:{MemoryModeCapacity:0 AppDirectModeCapacity:0 AvgPowerBudget:0} HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] MachineID:21801e6708c44f15b81395eb736a7cec SystemUUID:2730bf9d-d559-45ca-96f1-192133954467 BootID:2268e3e5-9437-4733-80ec-6085372d1c27 Filesystems:[{Device:/dev/vda4 DeviceMajor:252 DeviceMinor:4 Capacity:85292941312 Type:vfs Inodes:41679680 HasInodes:true} {Device:/tmp DeviceMajor:0 DeviceMinor:30 Capacity:16827064320 Type:vfs Inodes:1048576 HasInodes:true} {Device:/dev/vda3 DeviceMajor:252 DeviceMinor:3 Capacity:366869504 Type:vfs Inodes:98304 HasInodes:true} {Device:/run/user/1000 DeviceMajor:0 DeviceMinor:42 Capacity:3365408768 Type:vfs Inodes:821633 HasInodes:true} {Device:/var/lib/etcd DeviceMajor:0 DeviceMinor:43 Capacity:1073741824 Type:vfs Inodes:4108169 HasInodes:true} {Device:/dev/shm DeviceMajor:0 DeviceMinor:22 Capacity:16827060224 Type:vfs Inodes:4108169 HasInodes:true} {Device:/run DeviceMajor:0 DeviceMinor:24 Capacity:6730825728 Type:vfs Inodes:819200 HasInodes:true}] DiskMap:map[252:0:{Name:vda Major:252 Minor:0 Size:214748364800 Scheduler:none}] NetworkDevices:[{Name:br-ex MacAddress:fa:16:3e:82:ea:7e Speed:0 Mtu:1500} {Name:br-int MacAddress:d6:39:55:2e:22:71 Speed:0 Mtu:1400} {Name:ens3 MacAddress:fa:16:3e:82:ea:7e Speed:-1 Mtu:1500} {Name:ens7 MacAddress:fa:16:3e:5a:73:26 Speed:-1 Mtu:1500} {Name:ens7.20 MacAddress:52:54:00:2d:8a:b5 Speed:-1 Mtu:1496} {Name:ens7.21 MacAddress:52:54:00:d6:6a:ee Speed:-1 Mtu:1496} {Name:ens7.22 MacAddress:52:54:00:ff:8c:0a Speed:-1 Mtu:1496} {Name:eth10 MacAddress:ee:9b:a0:cb:e3:fd Speed:0 Mtu:1500} {Name:ovn-k8s-mp0 MacAddress:0a:58:0a:d9:00:02 Speed:0 Mtu:1400} {Name:ovs-system MacAddress:ba:b3:2e:ba:a3:5c Speed:0 Mtu:1500}] Topology:[{Id:0 Memory:33654124544 HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] Cores:[{Id:0 Threads:[0] Caches:[{Id:0 Size:32768 Type:Data Level:1} {Id:0 Size:32768 Type:Instruction Level:1} {Id:0 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:0 Size:16777216 Type:Unified Level:3}] SocketID:0 BookID: DrawerID:} {Id:0 Threads:[1] Caches:[{Id:1 Size:32768 Type:Data Level:1} {Id:1 Size:32768 Type:Instruction Level:1} {Id:1 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:1 Size:16777216 Type:Unified Level:3}] SocketID:1 BookID: DrawerID:} {Id:0 Threads:[10] Caches:[{Id:10 Size:32768 Type:Data Level:1} {Id:10 Size:32768 Type:Instruction Level:1} {Id:10 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:10 Size:16777216 Type:Unified Level:3}] SocketID:10 BookID: DrawerID:} {Id:0 Threads:[11] Caches:[{Id:11 Size:32768 Type:Data Level:1} {Id:11 Size:32768 Type:Instruction Level:1} {Id:11 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:11 Size:16777216 Type:Unified Level:3}] SocketID:11 BookID: DrawerID:} {Id:0 Threads:[2] Caches:[{Id:2 Size:32768 Type:Data Level:1} {Id:2 Size:32768 Type:Instruction Level:1} {Id:2 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:2 Size:16777216 Type:Unified Level:3}] SocketID:2 BookID: DrawerID:} {Id:0 Threads:[3] Caches:[{Id:3 Size:32768 Type:Data Level:1} {Id:3 Size:32768 Type:Instruction Level:1} {Id:3 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:3 Size:16777216 Type:Unified Level:3}] SocketID:3 BookID: DrawerID:} {Id:0 Threads:[4] Caches:[{Id:4 Size:32768 Type:Data Level:1} {Id:4 Size:32768 Type:Instruction Level:1} {Id:4 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:4 Size:16777216 Type:Unified Level:3}] SocketID:4 BookID: DrawerID:} {Id:0 Threads:[5] Caches:[{Id:5 Size:32768 Type:Data Level:1} {Id:5 Size:32768 Type:Instruction Level:1} {Id:5 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:5 Size:16777216 Type:Unified Level:3}] SocketID:5 BookID: DrawerID:} {Id:0 Threads:[6] Caches:[{Id:6 Size:32768 Type:Data Level:1} {Id:6 Size:32768 Type:Instruction Level:1} {Id:6 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:6 Size:16777216 Type:Unified Level:3}] SocketID:6 BookID: DrawerID:} {Id:0 Threads:[7] Caches:[{Id:7 Size:32768 Type:Data Level:1} {Id:7 Size:32768 Type:Instruction Level:1} {Id:7 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:7 Size:16777216 Type:Unified Level:3}] SocketID:7 BookID: DrawerID:} {Id:0 Threads:[8] Caches:[{Id:8 Size:32768 Type:Data Level:1} {Id:8 Size:32768 Type:Instruction Level:1} {Id:8 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:8 Size:16777216 Type:Unified Level:3}] SocketID:8 BookID: DrawerID:} {Id:0 Threads:[9] Caches:[{Id:9 Size:32768 Type:Data Level:1} {Id:9 Size:32768 Type:Instruction Level:1} {Id:9 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:9 Size:16777216 Type:Unified Level:3}] SocketID:9 BookID: DrawerID:}] Caches:[] Distances:[10]}] CloudProvider:Unknown InstanceType:Unknown InstanceID:None}
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.583187 5002 manager_no_libpfm.go:29] cAdvisor is build without cgo and/or libpfm support. Perf event counters are not available.
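
A quick unit check on the Machine record above: cAdvisor reports capacities in bytes, so MemoryCapacity:33654124544 is roughly 31.3 GiB and the vda disk (Size:214748364800) is exactly 200 GiB. A small Go verification of that arithmetic:

    package main

    import "fmt"

    // gib converts a byte count to GiB (2^30 bytes).
    func gib(b uint64) float64 { return float64(b) / (1 << 30) }

    func main() {
        fmt.Printf("memory: %.1f GiB\n", gib(33654124544))  // ~31.3
        fmt.Printf("vda:    %.0f GiB\n", gib(214748364800)) // 200
    }
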
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.583309 5002 manager.go:233] Version: {KernelVersion:5.14.0-427.50.2.el9_4.x86_64 ContainerOsVersion:Red Hat Enterprise Linux CoreOS 418.94.202502100215-0 DockerVersion: DockerAPIVersion: CadvisorVersion: CadvisorRevision:}
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.583618 5002 swap_util.go:113] "Swap is on" /proc/swaps contents="Filename\t\t\t\tType\t\tSize\t\tUsed\t\tPriority"
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.583861 5002 container_manager_linux.go:267] "Container manager verified user specified cgroup-root exists" cgroupRoot=[]
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.583913 5002 container_manager_linux.go:272] "Creating Container Manager object based on Node Config" nodeConfig={"NodeName":"crc","RuntimeCgroupsName":"/system.slice/crio.service","SystemCgroupsName":"/system.slice","KubeletCgroupsName":"","KubeletOOMScoreAdj":-999,"ContainerRuntime":"","CgroupsPerQOS":true,"CgroupRoot":"/","CgroupDriver":"systemd","KubeletRootDir":"/var/lib/kubelet","ProtectKernelDefaults":true,"KubeReservedCgroupName":"","SystemReservedCgroupName":"","ReservedSystemCPUs":{},"EnforceNodeAllocatable":{"pods":{}},"KubeReserved":null,"SystemReserved":{"cpu":"200m","ephemeral-storage":"350Mi","memory":"350Mi"},"HardEvictionThresholds":[{"Signal":"memory.available","Operator":"LessThan","Value":{"Quantity":"100Mi","Percentage":0},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.1},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.15},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null}],"QOSReserved":{},"CPUManagerPolicy":"none","CPUManagerPolicyOptions":null,"TopologyManagerScope":"container","CPUManagerReconcilePeriod":10000000000,"ExperimentalMemoryManagerPolicy":"None","ExperimentalMemoryManagerReservedMemory":null,"PodPidsLimit":4096,"EnforceCPULimits":true,"CPUCFSQuotaPeriod":100000000,"TopologyManagerPolicy":"none","TopologyManagerPolicyOptions":null,"CgroupVersion":2}
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.584193 5002 topology_manager.go:138] "Creating topology manager with none policy"
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.584212 5002 container_manager_linux.go:303] "Creating device plugin manager"
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.584667 5002 manager.go:142] "Creating Device Plugin manager" path="/var/lib/kubelet/device-plugins/kubelet.sock"
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.584706 5002 server.go:66] "Creating device plugin registration server" version="v1beta1" socket="/var/lib/kubelet/device-plugins/kubelet.sock"
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.585593 5002 state_mem.go:36] "Initialized new in-memory state store"
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.585775 5002 server.go:1245] "Using root directory" path="/var/lib/kubelet"
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.589804 5002 kubelet.go:418] "Attempting to sync node with API server"
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.589864 5002 kubelet.go:313] "Adding static pod path" path="/etc/kubernetes/manifests"
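
The "Adding static pod path" line means the kubelet will treat any manifest dropped into /etc/kubernetes/manifests as a pod source independent of the API server (the file.go "Watching path" line that follows is that watcher starting). A rough sketch of the idea in Go, polling rather than the kubelet's actual change-detection logic, with the interval chosen arbitrarily:

    package main

    import (
        "fmt"
        "os"
        "time"
    )

    func main() {
        const dir = "/etc/kubernetes/manifests"
        for {
            entries, err := os.ReadDir(dir)
            if err != nil {
                fmt.Fprintln(os.Stderr, "static pod source:", err)
            }
            for _, e := range entries {
                fmt.Println("static pod manifest:", e.Name()) // would be parsed as a Pod spec
            }
            time.Sleep(20 * time.Second) // illustrative interval
        }
    }
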
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.589923 5002 file.go:69] "Watching path" path="/etc/kubernetes/manifests"
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.589953 5002 kubelet.go:324] "Adding apiserver pod source"
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.589978 5002 apiserver.go:42] "Waiting for node sync before watching apiserver pods"
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.596224 5002 kuberuntime_manager.go:262] "Container runtime initialized" containerRuntime="cri-o" version="1.31.5-4.rhaos4.18.gitdad78d5.el9" apiVersion="v1"
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.597432 5002 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-server-current.pem".
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.600081 5002 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.194:6443: connect: connection refused
Sep 30 12:20:26 crc kubenswrapper[5002]: E0930 12:20:26.600200 5002 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.194:6443: connect: connection refused" logger="UnhandledError"
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.600319 5002 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.194:6443: connect: connection refused
Sep 30 12:20:26 crc kubenswrapper[5002]: E0930 12:20:26.601358 5002 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.194:6443: connect: connection refused" logger="UnhandledError"
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.601774 5002 kubelet.go:854] "Not starting ClusterTrustBundle informer because we are in static kubelet mode"
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.603468 5002 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/portworx-volume"
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.603507 5002 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/empty-dir"
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.603516 5002 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/git-repo"
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.603523 5002 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/host-path"
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.603534 5002 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/nfs"
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.603562 5002 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/secret"
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.603571 5002 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/iscsi"
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.603583 5002 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/downward-api"
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.603592 5002 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/fc"
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.603600 5002 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/configmap"
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.603610 5002 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/projected"
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.603617 5002 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/local-volume"
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.603641 5002 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/csi"
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.604028 5002 server.go:1280] "Started kubelet"
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.604100 5002 server.go:163] "Starting to listen" address="0.0.0.0" port=10250
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.604337 5002 ratelimit.go:55] "Setting rate limiting for endpoint" service="podresources" qps=100 burstTokens=10
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.605004 5002 server.go:236] "Starting to serve the podresources API" endpoint="unix:/var/lib/kubelet/pod-resources/kubelet.sock"
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.605142 5002 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.194:6443: connect: connection refused
Sep 30 12:20:26 crc systemd[1]: Started Kubernetes Kubelet.
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.606103 5002 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate rotation is enabled
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.606137 5002 fs_resource_analyzer.go:67] "Starting FS ResourceAnalyzer"
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.606358 5002 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-10 23:48:57.586001662 +0000 UTC
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.606405 5002 certificate_manager.go:356] kubernetes.io/kubelet-serving: Waiting 2459h28m30.979600712s for next certificate rotation
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.606504 5002 volume_manager.go:287] "The desired_state_of_world populator starts"
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.606526 5002 volume_manager.go:289] "Starting Kubelet Volume Manager"
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.606650 5002 desired_state_of_world_populator.go:146] "Desired state populator starts to run"
Sep 30 12:20:26 crc kubenswrapper[5002]: E0930 12:20:26.607255 5002 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.608275 5002 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.194:6443: connect: connection refused
Sep 30 12:20:26 crc kubenswrapper[5002]: E0930 12:20:26.608371 5002 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.194:6443: connect: connection refused" logger="UnhandledError"
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.608592 5002 factory.go:153] Registering CRI-O factory
Sep 30 12:20:26 crc kubenswrapper[5002]: E0930 12:20:26.609265 5002 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.194:6443: connect: connection refused" interval="200ms"
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.609415 5002 factory.go:221] Registration of the crio container factory successfully
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.609519 5002 factory.go:219] Registration of the containerd container factory failed: unable to create containerd client: containerd: cannot unix dial containerd api service: dial unix /run/containerd/containerd.sock: connect: no such file or directory
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.609547 5002 factory.go:55] Registering systemd factory
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.609553 5002 factory.go:221] Registration of the systemd container factory successfully
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.609576 5002 factory.go:103] Registering Raw factory
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.609588 5002 manager.go:1196] Started watching for new ooms in manager
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.610797 5002 server.go:460] "Adding debug handlers to kubelet server"
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.610943 5002 manager.go:319] Starting recovery of all containers
Sep 30 12:20:26 crc kubenswrapper[5002]: E0930 12:20:26.614145 5002 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/default/events\": dial tcp 38.102.83.194:6443: connect: connection refused" event="&Event{ObjectMeta:{crc.186a0ec1043a46f4 default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:Starting,Message:Starting kubelet.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-09-30 12:20:26.604005108 +0000 UTC m=+0.853687254,LastTimestamp:2025-09-30 12:20:26.604005108 +0000 UTC m=+0.853687254,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}"
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.620208 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" seLinuxMountContext=""
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.620273 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" seLinuxMountContext=""
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.620292 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" seLinuxMountContext=""
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.620326 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" seLinuxMountContext=""
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.623697 5002 reconstruct.go:144] "Volume is marked device as uncertain and added into the actual state" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" deviceMountPath="/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount"
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.623754 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" seLinuxMountContext=""
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.623782 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" seLinuxMountContext=""
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.623797 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" seLinuxMountContext=""
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.623817 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" seLinuxMountContext=""
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.623836 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" seLinuxMountContext=""
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.623867 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" seLinuxMountContext=""
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.623883 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" seLinuxMountContext=""
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.623898 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" seLinuxMountContext=""
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.623916 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" seLinuxMountContext=""
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.623939 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" seLinuxMountContext=""
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.623952 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" seLinuxMountContext=""
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.623965 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" seLinuxMountContext=""
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.623982 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" seLinuxMountContext=""
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.623998 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" seLinuxMountContext=""
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.624015 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" seLinuxMountContext=""
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.624030 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" seLinuxMountContext=""
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.624055 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" seLinuxMountContext=""
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.624073 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" seLinuxMountContext=""
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.624087 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" seLinuxMountContext=""
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.624100 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" seLinuxMountContext=""
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.624117 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" seLinuxMountContext=""
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.624130 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" seLinuxMountContext=""
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.624153 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" seLinuxMountContext=""
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.624174 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" seLinuxMountContext=""
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.624188 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" seLinuxMountContext=""
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.624464 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" seLinuxMountContext=""
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.624524 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" seLinuxMountContext=""
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.624580 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" seLinuxMountContext=""
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.624598 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" seLinuxMountContext=""
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.624734 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" seLinuxMountContext=""
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.624776 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" seLinuxMountContext=""
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.624811 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" seLinuxMountContext=""
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.624834 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" seLinuxMountContext=""
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.624900 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" seLinuxMountContext=""
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.624929 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" seLinuxMountContext=""
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.624949 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" seLinuxMountContext=""
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.624980 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3b6479f0-333b-4a96-9adf-2099afdc2447" volumeName="kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr" seLinuxMountContext=""
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.625003 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" seLinuxMountContext=""
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.625024 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" seLinuxMountContext=""
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.625051 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" seLinuxMountContext=""
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.625073 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" seLinuxMountContext=""
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.625122 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" seLinuxMountContext=""
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.625144 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" seLinuxMountContext=""
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.625166 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" seLinuxMountContext=""
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.625195 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" seLinuxMountContext=""
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.625216 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" seLinuxMountContext=""
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.625243 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" seLinuxMountContext=""
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.625263 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" seLinuxMountContext=""
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.625300 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" seLinuxMountContext=""
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.625324 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" seLinuxMountContext=""
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.625358 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" seLinuxMountContext=""
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.625389 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" seLinuxMountContext=""
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.625413 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" seLinuxMountContext=""
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.625443 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" seLinuxMountContext=""
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.625464 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" seLinuxMountContext=""
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.625516 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" seLinuxMountContext=""
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.625544 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" seLinuxMountContext=""
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.625564 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" seLinuxMountContext=""
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.625590 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" seLinuxMountContext=""
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.625610 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" seLinuxMountContext=""
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.625633 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" seLinuxMountContext=""
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.625658 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" seLinuxMountContext=""
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.625677 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" seLinuxMountContext=""
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.625702 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" seLinuxMountContext=""
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.625721 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" seLinuxMountContext=""
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.625742 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" seLinuxMountContext=""
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.625767 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" seLinuxMountContext=""
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.625795 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" seLinuxMountContext=""
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.625824 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" seLinuxMountContext=""
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.625844 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" seLinuxMountContext=""
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.625862 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" seLinuxMountContext=""
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.625963 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" seLinuxMountContext=""
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.625986 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script" seLinuxMountContext=""
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.626012 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" seLinuxMountContext=""
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.626031 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" seLinuxMountContext=""
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.626050 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" seLinuxMountContext=""
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.626077 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" seLinuxMountContext=""
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.626097 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" volumeName="kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" seLinuxMountContext=""
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.626125 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert" seLinuxMountContext=""
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.626146 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" seLinuxMountContext=""
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.626164 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" seLinuxMountContext=""
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.626188 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" seLinuxMountContext=""
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.626208 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" seLinuxMountContext=""
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.626226 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" seLinuxMountContext=""
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.626274 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" seLinuxMountContext=""
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.626295 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" seLinuxMountContext=""
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.626361 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" seLinuxMountContext=""
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.626382 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" seLinuxMountContext=""
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.626402 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" seLinuxMountContext=""
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.626430 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" seLinuxMountContext=""
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.626450 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" seLinuxMountContext=""
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.626500 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf" seLinuxMountContext=""
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.626520 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" seLinuxMountContext=""
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.626542 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49ef4625-1d3a-4a9f-b595-c2433d32326d" volumeName="kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" seLinuxMountContext=""
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.626569 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" seLinuxMountContext=""
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.626589 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" seLinuxMountContext=""
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.626629 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" seLinuxMountContext=""
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.626650 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" seLinuxMountContext=""
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.626671 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" seLinuxMountContext=""
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.626699 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" seLinuxMountContext=""
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.626736 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" seLinuxMountContext=""
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.626760 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" seLinuxMountContext=""
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.626805 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" seLinuxMountContext=""
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.626836 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls" seLinuxMountContext=""
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.626859 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" seLinuxMountContext=""
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.626889 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" seLinuxMountContext=""
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.626918 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" seLinuxMountContext=""
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.626971 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" seLinuxMountContext=""
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.627010 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" seLinuxMountContext=""
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.627041 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert" seLinuxMountContext=""
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.627068 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" seLinuxMountContext=""
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.627089 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" seLinuxMountContext=""
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.627114 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" seLinuxMountContext=""
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.627150 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides" seLinuxMountContext=""
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.627171 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" seLinuxMountContext=""
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.627196 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" seLinuxMountContext=""
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.627223 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" seLinuxMountContext=""
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.627249 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" seLinuxMountContext=""
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.627268 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" seLinuxMountContext=""
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.627301 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" seLinuxMountContext=""
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.627326 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" seLinuxMountContext=""
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.627344 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" seLinuxMountContext=""
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.627369 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" seLinuxMountContext=""
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.627390 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" seLinuxMountContext=""
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.627409 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" seLinuxMountContext=""
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.629328 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d751cbb-f2e2-430d-9754-c882a5e924a5" volumeName="kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl" seLinuxMountContext=""
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.629411 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" seLinuxMountContext=""
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.629430 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" volumeName="kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" seLinuxMountContext=""
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.629445 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" seLinuxMountContext=""
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.629460 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" seLinuxMountContext=""
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.629492 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" seLinuxMountContext=""
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.629506 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" seLinuxMountContext=""
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.629521 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" seLinuxMountContext=""
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.629535 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" seLinuxMountContext=""
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.629548 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" seLinuxMountContext=""
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.629560 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" seLinuxMountContext=""
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.629573 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" seLinuxMountContext=""
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.629586 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" seLinuxMountContext=""
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.629599 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" seLinuxMountContext=""
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.629613 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" seLinuxMountContext=""
Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.629626 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69"
volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" seLinuxMountContext="" Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.629642 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" seLinuxMountContext="" Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.629655 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" seLinuxMountContext="" Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.629669 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" seLinuxMountContext="" Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.629683 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" seLinuxMountContext="" Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.629696 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" seLinuxMountContext="" Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.629708 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" seLinuxMountContext="" Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.629721 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" seLinuxMountContext="" Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.629735 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" seLinuxMountContext="" Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.629747 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" seLinuxMountContext="" Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.629762 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" seLinuxMountContext="" Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.629776 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" 
volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" seLinuxMountContext="" Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.629789 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" seLinuxMountContext="" Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.629804 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" seLinuxMountContext="" Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.629818 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" seLinuxMountContext="" Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.629830 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" seLinuxMountContext="" Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.629843 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" seLinuxMountContext="" Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.629857 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" seLinuxMountContext="" Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.629871 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" seLinuxMountContext="" Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.629884 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" seLinuxMountContext="" Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.629897 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" seLinuxMountContext="" Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.629909 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" seLinuxMountContext="" Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.629922 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" 
volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm" seLinuxMountContext="" Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.629937 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" seLinuxMountContext="" Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.629950 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" seLinuxMountContext="" Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.629963 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" seLinuxMountContext="" Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.629975 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="44663579-783b-4372-86d6-acf235a62d72" volumeName="kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" seLinuxMountContext="" Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.629988 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" seLinuxMountContext="" Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.630001 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" seLinuxMountContext="" Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.630015 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" seLinuxMountContext="" Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.630027 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" seLinuxMountContext="" Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.630042 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" seLinuxMountContext="" Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.630055 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" seLinuxMountContext="" Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.630069 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" 
volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" seLinuxMountContext="" Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.630081 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb" seLinuxMountContext="" Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.630094 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" seLinuxMountContext="" Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.630110 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" seLinuxMountContext="" Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.630123 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" seLinuxMountContext="" Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.630137 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" seLinuxMountContext="" Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.630151 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" seLinuxMountContext="" Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.630165 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf" seLinuxMountContext="" Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.630181 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" seLinuxMountContext="" Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.630198 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" seLinuxMountContext="" Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.630217 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" seLinuxMountContext="" Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.630233 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" 
volumeName="kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" seLinuxMountContext="" Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.630248 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" seLinuxMountContext="" Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.630267 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" seLinuxMountContext="" Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.630285 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5" seLinuxMountContext="" Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.630299 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" seLinuxMountContext="" Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.630312 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" seLinuxMountContext="" Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.630365 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" seLinuxMountContext="" Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.630379 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" seLinuxMountContext="" Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.630395 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" seLinuxMountContext="" Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.630408 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" seLinuxMountContext="" Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.630422 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" seLinuxMountContext="" Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.630436 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" 
volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" seLinuxMountContext="" Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.630450 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" seLinuxMountContext="" Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.630464 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" seLinuxMountContext="" Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.630502 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" seLinuxMountContext="" Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.630535 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" seLinuxMountContext="" Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.630554 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" seLinuxMountContext="" Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.630586 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" seLinuxMountContext="" Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.630602 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" seLinuxMountContext="" Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.630623 5002 reconstruct.go:97] "Volume reconstruction finished" Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.630632 5002 reconciler.go:26] "Reconciler: start to sync state" Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.641582 5002 manager.go:324] Recovery completed Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.654867 5002 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.658107 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.658159 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.658170 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.659558 5002 cpu_manager.go:225] "Starting CPU manager" policy="none" Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 
12:20:26.659578 5002 cpu_manager.go:226] "Reconciling" reconcilePeriod="10s" Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.659609 5002 state_mem.go:36] "Initialized new in-memory state store" Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.671766 5002 kubelet_network_linux.go:50] "Initialized iptables rules." protocol="IPv4" Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.674742 5002 kubelet_network_linux.go:50] "Initialized iptables rules." protocol="IPv6" Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.674785 5002 status_manager.go:217] "Starting to sync pod status with apiserver" Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.674825 5002 kubelet.go:2335] "Starting kubelet main sync loop" Sep 30 12:20:26 crc kubenswrapper[5002]: E0930 12:20:26.675000 5002 kubelet.go:2359] "Skipping pod synchronization" err="[container runtime status check may not have completed yet, PLEG is not healthy: pleg has yet to be successful]" Sep 30 12:20:26 crc kubenswrapper[5002]: W0930 12:20:26.677132 5002 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.194:6443: connect: connection refused Sep 30 12:20:26 crc kubenswrapper[5002]: E0930 12:20:26.677222 5002 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.194:6443: connect: connection refused" logger="UnhandledError" Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.679816 5002 policy_none.go:49] "None policy: Start" Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.681440 5002 memory_manager.go:170] "Starting memorymanager" policy="None" Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.681578 5002 state_mem.go:35] "Initializing new in-memory state store" Sep 30 12:20:26 crc kubenswrapper[5002]: E0930 12:20:26.708140 5002 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.733888 5002 manager.go:334] "Starting Device Plugin manager" Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.734086 5002 manager.go:513] "Failed to read data from checkpoint" checkpoint="kubelet_internal_checkpoint" err="checkpoint is not found" Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.734113 5002 server.go:79] "Starting device plugin registration server" Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.734805 5002 eviction_manager.go:189] "Eviction manager: starting control loop" Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.734845 5002 container_log_manager.go:189] "Initializing container log rotate workers" workers=1 monitorPeriod="10s" Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.735227 5002 plugin_watcher.go:51] "Plugin Watcher Start" path="/var/lib/kubelet/plugins_registry" Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.735422 5002 plugin_manager.go:116] "The desired_state_of_world populator (plugin watcher) starts" Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.735443 5002 plugin_manager.go:118] "Starting Kubelet Plugin Manager" Sep 30 12:20:26 crc kubenswrapper[5002]: E0930 12:20:26.743090 5002 eviction_manager.go:285] "Eviction manager: failed to get summary 
stats" err="failed to get node info: node \"crc\" not found" Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.775448 5002 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-machine-config-operator/kube-rbac-proxy-crio-crc","openshift-etcd/etcd-crc","openshift-kube-apiserver/kube-apiserver-crc","openshift-kube-controller-manager/kube-controller-manager-crc","openshift-kube-scheduler/openshift-kube-scheduler-crc"] Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.775590 5002 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.777217 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.777295 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.777316 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.777576 5002 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.778137 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.778230 5002 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.778981 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.779012 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.779024 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.779236 5002 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.779558 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-etcd/etcd-crc" Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.779638 5002 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.779743 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.779774 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.779792 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.780750 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.780797 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.780808 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.780840 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.780894 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.780918 5002 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.780919 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.781085 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.781147 5002 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.781820 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.781871 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.781885 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.782107 5002 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.782235 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.782296 5002 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.782604 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.782643 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.782665 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.782924 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.782944 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.782957 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.783071 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.783103 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.783126 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.783176 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.783200 5002 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.784153 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.784183 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.784212 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:20:26 crc kubenswrapper[5002]: E0930 12:20:26.811114 5002 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.194:6443: connect: connection refused" interval="400ms" Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.832923 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.832976 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.833011 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.833040 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.833064 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.833090 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.833118 5002 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.833147 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.833181 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.833218 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.833244 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.833271 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.833296 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.833320 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.833709 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.836457 5002 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 12:20:26 crc 
kubenswrapper[5002]: I0930 12:20:26.838581 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.838641 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.838664 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.838701 5002 kubelet_node_status.go:76] "Attempting to register node" node="crc" Sep 30 12:20:26 crc kubenswrapper[5002]: E0930 12:20:26.839232 5002 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.194:6443: connect: connection refused" node="crc" Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.938338 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.938682 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.938713 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.938835 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.938878 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.938979 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.938977 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.938978 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.939008 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.939342 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.939424 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.939520 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.939036 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.939547 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.939648 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.939683 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.939717 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " 
pod="openshift-kube-controller-manager/kube-controller-manager-crc" Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.939747 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.939775 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.939804 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.939830 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.939985 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.940021 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.940047 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.940067 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.939506 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.940102 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 
12:20:26.940121 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.939991 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Sep 30 12:20:26 crc kubenswrapper[5002]: I0930 12:20:26.940304 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Sep 30 12:20:27 crc kubenswrapper[5002]: I0930 12:20:27.040200 5002 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 12:20:27 crc kubenswrapper[5002]: I0930 12:20:27.041822 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:20:27 crc kubenswrapper[5002]: I0930 12:20:27.041863 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:20:27 crc kubenswrapper[5002]: I0930 12:20:27.041882 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:20:27 crc kubenswrapper[5002]: I0930 12:20:27.041914 5002 kubelet_node_status.go:76] "Attempting to register node" node="crc" Sep 30 12:20:27 crc kubenswrapper[5002]: E0930 12:20:27.042369 5002 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.194:6443: connect: connection refused" node="crc" Sep 30 12:20:27 crc kubenswrapper[5002]: I0930 12:20:27.121998 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Sep 30 12:20:27 crc kubenswrapper[5002]: I0930 12:20:27.141250 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Sep 30 12:20:27 crc kubenswrapper[5002]: I0930 12:20:27.142766 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd/etcd-crc" Sep 30 12:20:27 crc kubenswrapper[5002]: I0930 12:20:27.162183 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 30 12:20:27 crc kubenswrapper[5002]: I0930 12:20:27.166076 5002 util.go:30] "No sandbox for pod can be found. 
Sep 30 12:20:27 crc kubenswrapper[5002]: I0930 12:20:27.166076 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Sep 30 12:20:27 crc kubenswrapper[5002]: W0930 12:20:27.170958 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd1b160f5dda77d281dd8e69ec8d817f9.slice/crio-07f25053aeb810b96ef1e9ca43dcff8620ac4b3e772afc5a7a3717b39f6e2a1f WatchSource:0}: Error finding container 07f25053aeb810b96ef1e9ca43dcff8620ac4b3e772afc5a7a3717b39f6e2a1f: Status 404 returned error can't find the container with id 07f25053aeb810b96ef1e9ca43dcff8620ac4b3e772afc5a7a3717b39f6e2a1f
Sep 30 12:20:27 crc kubenswrapper[5002]: W0930 12:20:27.180941 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3dcd261975c3d6b9a6ad6367fd4facd3.slice/crio-7d346be3e6c12dce6a028e371294432a9f966d060763ead7ec3238ac0adeee6a WatchSource:0}: Error finding container 7d346be3e6c12dce6a028e371294432a9f966d060763ead7ec3238ac0adeee6a: Status 404 returned error can't find the container with id 7d346be3e6c12dce6a028e371294432a9f966d060763ead7ec3238ac0adeee6a
Sep 30 12:20:27 crc kubenswrapper[5002]: W0930 12:20:27.188717 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2139d3e2895fc6797b9c76a1b4c9886d.slice/crio-cd6730ce95995adba8375be64c3cf1e2d5568017d1d51b6b44e3783ec535f820 WatchSource:0}: Error finding container cd6730ce95995adba8375be64c3cf1e2d5568017d1d51b6b44e3783ec535f820: Status 404 returned error can't find the container with id cd6730ce95995adba8375be64c3cf1e2d5568017d1d51b6b44e3783ec535f820
Sep 30 12:20:27 crc kubenswrapper[5002]: W0930 12:20:27.193835 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf614b9022728cf315e60c057852e563e.slice/crio-e144e1676c6c01d9033392aac33bc017d30fbf47174f1f972d0763739da3a17f WatchSource:0}: Error finding container e144e1676c6c01d9033392aac33bc017d30fbf47174f1f972d0763739da3a17f: Status 404 returned error can't find the container with id e144e1676c6c01d9033392aac33bc017d30fbf47174f1f972d0763739da3a17f
Sep 30 12:20:27 crc kubenswrapper[5002]: W0930 12:20:27.194405 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf4b27818a5e8e43d0dc095d08835c792.slice/crio-d2e20c69c6c556a333c6fb79fc5d6f55f4f26a0b64ef34cba4146a20cedc4b37 WatchSource:0}: Error finding container d2e20c69c6c556a333c6fb79fc5d6f55f4f26a0b64ef34cba4146a20cedc4b37: Status 404 returned error can't find the container with id d2e20c69c6c556a333c6fb79fc5d6f55f4f26a0b64ef34cba4146a20cedc4b37
Sep 30 12:20:27 crc kubenswrapper[5002]: E0930 12:20:27.212659 5002 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.194:6443: connect: connection refused" interval="800ms"
Sep 30 12:20:27 crc kubenswrapper[5002]: I0930 12:20:27.442622 5002 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Sep 30 12:20:27 crc kubenswrapper[5002]: I0930 12:20:27.444104 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 12:20:27 crc kubenswrapper[5002]: I0930 12:20:27.444143 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 12:20:27 crc kubenswrapper[5002]: I0930 12:20:27.444154 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 12:20:27 crc kubenswrapper[5002]: I0930 12:20:27.444180 5002 kubelet_node_status.go:76] "Attempting to register node" node="crc"
Sep 30 12:20:27 crc kubenswrapper[5002]: E0930 12:20:27.444681 5002 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.194:6443: connect: connection refused" node="crc"
Sep 30 12:20:27 crc kubenswrapper[5002]: W0930 12:20:27.466068 5002 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.194:6443: connect: connection refused
Sep 30 12:20:27 crc kubenswrapper[5002]: E0930 12:20:27.466202 5002 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.194:6443: connect: connection refused" logger="UnhandledError"
Sep 30 12:20:27 crc kubenswrapper[5002]: I0930 12:20:27.606226 5002 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.194:6443: connect: connection refused
Sep 30 12:20:27 crc kubenswrapper[5002]: I0930 12:20:27.682534 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"d2e20c69c6c556a333c6fb79fc5d6f55f4f26a0b64ef34cba4146a20cedc4b37"}
Sep 30 12:20:27 crc kubenswrapper[5002]: I0930 12:20:27.683890 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"e144e1676c6c01d9033392aac33bc017d30fbf47174f1f972d0763739da3a17f"}
Sep 30 12:20:27 crc kubenswrapper[5002]: I0930 12:20:27.684809 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"cd6730ce95995adba8375be64c3cf1e2d5568017d1d51b6b44e3783ec535f820"}
Sep 30 12:20:27 crc kubenswrapper[5002]: I0930 12:20:27.685892 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"7d346be3e6c12dce6a028e371294432a9f966d060763ead7ec3238ac0adeee6a"}
Sep 30 12:20:27 crc kubenswrapper[5002]: I0930 12:20:27.686941 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"07f25053aeb810b96ef1e9ca43dcff8620ac4b3e772afc5a7a3717b39f6e2a1f"}
Sep 30 12:20:27 crc kubenswrapper[5002]: W0930 12:20:27.740750 5002 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.194:6443: connect: connection refused
Sep 30 12:20:27 crc kubenswrapper[5002]: E0930 12:20:27.740836 5002 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.194:6443: connect: connection refused" logger="UnhandledError"
Sep 30 12:20:27 crc kubenswrapper[5002]: W0930 12:20:27.743496 5002 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.194:6443: connect: connection refused
Sep 30 12:20:27 crc kubenswrapper[5002]: E0930 12:20:27.743529 5002 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.194:6443: connect: connection refused" logger="UnhandledError"
Sep 30 12:20:27 crc kubenswrapper[5002]: W0930 12:20:27.918235 5002 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.194:6443: connect: connection refused
Sep 30 12:20:27 crc kubenswrapper[5002]: E0930 12:20:27.918677 5002 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.194:6443: connect: connection refused" logger="UnhandledError"
Sep 30 12:20:28 crc kubenswrapper[5002]: E0930 12:20:28.014280 5002 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.194:6443: connect: connection refused" interval="1.6s"
Sep 30 12:20:28 crc kubenswrapper[5002]: I0930 12:20:28.245270 5002 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Sep 30 12:20:28 crc kubenswrapper[5002]: I0930 12:20:28.246963 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 12:20:28 crc kubenswrapper[5002]: I0930 12:20:28.247036 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 12:20:28 crc kubenswrapper[5002]: I0930 12:20:28.247061 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 12:20:28 crc kubenswrapper[5002]: I0930 12:20:28.247102 5002 kubelet_node_status.go:76] "Attempting to register node" node="crc"
Sep 30 12:20:28 crc kubenswrapper[5002]: E0930 12:20:28.247802 5002 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.194:6443: connect: connection refused" node="crc"
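
The lease-controller retries back off exponentially: the reported interval doubles from 800ms to 1.6s here, and reaches 3.2s and then 6.4s further down. A sketch of that doubling cadence in Go (the 7s cap is an assumed illustration, not the kubelet's configured maximum):

package main

import (
	"fmt"
	"time"
)

func main() {
	// Mirrors the "Failed to ensure lease exists, will retry" intervals:
	// 800ms -> 1.6s -> 3.2s -> 6.4s.
	interval := 800 * time.Millisecond
	const maxInterval = 7 * time.Second // assumed cap for the sketch
	for attempt := 1; attempt <= 4; attempt++ {
		fmt.Printf("attempt %d failed, next retry in %v\n", attempt, interval)
		if next := interval * 2; next <= maxInterval {
			interval = next
		}
	}
}
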
"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.194:6443: connect: connection refused Sep 30 12:20:28 crc kubenswrapper[5002]: I0930 12:20:28.692798 5002 generic.go:334] "Generic (PLEG): container finished" podID="d1b160f5dda77d281dd8e69ec8d817f9" containerID="1f6f78afdbf3b3bd7c28515e9ff9ad08cdccd4f4f44106f8aa5375638b3e8ec9" exitCode=0 Sep 30 12:20:28 crc kubenswrapper[5002]: I0930 12:20:28.692905 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerDied","Data":"1f6f78afdbf3b3bd7c28515e9ff9ad08cdccd4f4f44106f8aa5375638b3e8ec9"} Sep 30 12:20:28 crc kubenswrapper[5002]: I0930 12:20:28.692941 5002 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 12:20:28 crc kubenswrapper[5002]: I0930 12:20:28.694406 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:20:28 crc kubenswrapper[5002]: I0930 12:20:28.694455 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:20:28 crc kubenswrapper[5002]: I0930 12:20:28.694500 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:20:28 crc kubenswrapper[5002]: I0930 12:20:28.696460 5002 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="24b1bc36a80f3925846e76f49c3347f1e2f14555096b3a514b06c6eb2e8fe02c" exitCode=0 Sep 30 12:20:28 crc kubenswrapper[5002]: I0930 12:20:28.696622 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"24b1bc36a80f3925846e76f49c3347f1e2f14555096b3a514b06c6eb2e8fe02c"} Sep 30 12:20:28 crc kubenswrapper[5002]: I0930 12:20:28.696754 5002 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 12:20:28 crc kubenswrapper[5002]: I0930 12:20:28.698357 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:20:28 crc kubenswrapper[5002]: I0930 12:20:28.698399 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:20:28 crc kubenswrapper[5002]: I0930 12:20:28.698411 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:20:28 crc kubenswrapper[5002]: I0930 12:20:28.700595 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"f45ac789dc45e907d68fe54ec7bc2ea72dbcc5f46cd55c113c014203d5e709d9"} Sep 30 12:20:28 crc kubenswrapper[5002]: I0930 12:20:28.700646 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"047c48f1e175cddc7dc981a8d9027783f4a90c21cb444050771de50fb02e7a9e"} Sep 30 12:20:28 crc kubenswrapper[5002]: I0930 12:20:28.700649 5002 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 12:20:28 crc kubenswrapper[5002]: I0930 12:20:28.700661 5002 
Sep 30 12:20:28 crc kubenswrapper[5002]: I0930 12:20:28.700661 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"5d2e0f074c0f31a5432cea24faa4f95c276f59cdf48bca8ceecd065b4cdff5d9"}
Sep 30 12:20:28 crc kubenswrapper[5002]: I0930 12:20:28.702105 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"902cc950dc462d86c9de62cac5d04e99aa3240952f43e8a5f07884e70ff84b56"}
Sep 30 12:20:28 crc kubenswrapper[5002]: I0930 12:20:28.700609 5002 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Sep 30 12:20:28 crc kubenswrapper[5002]: I0930 12:20:28.703195 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 12:20:28 crc kubenswrapper[5002]: I0930 12:20:28.703290 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 12:20:28 crc kubenswrapper[5002]: I0930 12:20:28.703373 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 12:20:28 crc kubenswrapper[5002]: I0930 12:20:28.703736 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 12:20:28 crc kubenswrapper[5002]: I0930 12:20:28.703783 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 12:20:28 crc kubenswrapper[5002]: I0930 12:20:28.703802 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 12:20:28 crc kubenswrapper[5002]: I0930 12:20:28.704890 5002 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="3a814062d9554255b768b26ad452946c64836707ac0e68c9e3f107dc587c0ccb" exitCode=0
Sep 30 12:20:28 crc kubenswrapper[5002]: I0930 12:20:28.705054 5002 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Sep 30 12:20:28 crc kubenswrapper[5002]: I0930 12:20:28.705069 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"3a814062d9554255b768b26ad452946c64836707ac0e68c9e3f107dc587c0ccb"}
Sep 30 12:20:28 crc kubenswrapper[5002]: I0930 12:20:28.706633 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 12:20:28 crc kubenswrapper[5002]: I0930 12:20:28.706673 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 12:20:28 crc kubenswrapper[5002]: I0930 12:20:28.706690 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 12:20:28 crc kubenswrapper[5002]: I0930 12:20:28.711558 5002 generic.go:334] "Generic (PLEG): container finished" podID="3dcd261975c3d6b9a6ad6367fd4facd3" containerID="84c3302e7daef1722f84b63d532dabe8e9f8cf5679478df9b92b73273b3a3000" exitCode=0
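
The "Generic (PLEG): container finished ... exitCode=0" entries above record the static pods' init containers terminating normally before the main containers start. Once the apiserver answers, the same transitions are visible as terminated init-container statuses on the pod objects; a client-go sketch, assuming a kubeconfig at a hypothetical path:

package main

import (
	"context"
	"fmt"

	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/client-go/kubernetes"
	"k8s.io/client-go/tools/clientcmd"
)

func main() {
	// Assumed kubeconfig location; adjust for the environment.
	cfg, err := clientcmd.BuildConfigFromFlags("", "/path/to/kubeconfig")
	if err != nil {
		panic(err)
	}
	cs := kubernetes.NewForConfigOrDie(cfg)
	pod, err := cs.CoreV1().Pods("openshift-etcd").Get(context.TODO(), "etcd-crc", metav1.GetOptions{})
	if err != nil {
		panic(err)
	}
	// Mirrors the PLEG "container finished ... exitCode=0" lines.
	for _, s := range pod.Status.InitContainerStatuses {
		if t := s.State.Terminated; t != nil {
			fmt.Printf("init container %s exited with code %d\n", s.Name, t.ExitCode)
		}
	}
}
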
event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerDied","Data":"84c3302e7daef1722f84b63d532dabe8e9f8cf5679478df9b92b73273b3a3000"} Sep 30 12:20:28 crc kubenswrapper[5002]: I0930 12:20:28.711724 5002 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 12:20:28 crc kubenswrapper[5002]: I0930 12:20:28.713187 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:20:28 crc kubenswrapper[5002]: I0930 12:20:28.713241 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:20:28 crc kubenswrapper[5002]: I0930 12:20:28.713264 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:20:29 crc kubenswrapper[5002]: I0930 12:20:29.605700 5002 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.194:6443: connect: connection refused Sep 30 12:20:29 crc kubenswrapper[5002]: E0930 12:20:29.615772 5002 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.194:6443: connect: connection refused" interval="3.2s" Sep 30 12:20:29 crc kubenswrapper[5002]: W0930 12:20:29.629100 5002 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.194:6443: connect: connection refused Sep 30 12:20:29 crc kubenswrapper[5002]: E0930 12:20:29.629175 5002 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.194:6443: connect: connection refused" logger="UnhandledError" Sep 30 12:20:29 crc kubenswrapper[5002]: I0930 12:20:29.717623 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"b99e73bb34b117cdb12e2475fc8e7d0ffdcb7fc44a000282e7776473eca209ea"} Sep 30 12:20:29 crc kubenswrapper[5002]: I0930 12:20:29.717673 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"017ebbf00a59d33f36e3515dd280bc0c4363da8b3e621339185ebdb3c8728148"} Sep 30 12:20:29 crc kubenswrapper[5002]: I0930 12:20:29.717684 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"3ee0dc371a43f3780e1178e29ac491c7a61786fccb37527dd2f21d800adbbdae"} Sep 30 12:20:29 crc kubenswrapper[5002]: I0930 12:20:29.717697 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"a46e01897629261ad42fc3fa2cc7fdf56a2a020864a1088d9e58edf61aac296e"} Sep 30 12:20:29 crc kubenswrapper[5002]: I0930 12:20:29.719239 5002 generic.go:334] "Generic (PLEG): 
container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="74db2c969d2f9049fd8e8130a9fd50b312f3fce049ab0e814390dbff3b0cb596" exitCode=0 Sep 30 12:20:29 crc kubenswrapper[5002]: I0930 12:20:29.719288 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"74db2c969d2f9049fd8e8130a9fd50b312f3fce049ab0e814390dbff3b0cb596"} Sep 30 12:20:29 crc kubenswrapper[5002]: I0930 12:20:29.719424 5002 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 12:20:29 crc kubenswrapper[5002]: I0930 12:20:29.720386 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:20:29 crc kubenswrapper[5002]: I0930 12:20:29.720410 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:20:29 crc kubenswrapper[5002]: I0930 12:20:29.720418 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:20:29 crc kubenswrapper[5002]: I0930 12:20:29.722290 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"cd0dba52ee6bddc02187198fe6c409abf3d336df93b558b27fc7f76e7ec8f2b1"} Sep 30 12:20:29 crc kubenswrapper[5002]: I0930 12:20:29.722314 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"96f9b5d0a287ad67a6f1008d06b0d371a522722280353ed9001f81198b9295ec"} Sep 30 12:20:29 crc kubenswrapper[5002]: I0930 12:20:29.722325 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"6057b9d238b4d4dd1329f0ffdbfd88f2f8869c06e5eb681041229a15dd4a08dd"} Sep 30 12:20:29 crc kubenswrapper[5002]: I0930 12:20:29.722393 5002 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 12:20:29 crc kubenswrapper[5002]: I0930 12:20:29.723248 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:20:29 crc kubenswrapper[5002]: I0930 12:20:29.723271 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:20:29 crc kubenswrapper[5002]: I0930 12:20:29.723280 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:20:29 crc kubenswrapper[5002]: I0930 12:20:29.724686 5002 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 12:20:29 crc kubenswrapper[5002]: I0930 12:20:29.725045 5002 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 12:20:29 crc kubenswrapper[5002]: I0930 12:20:29.725097 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"b282a6a59ec67140cc73df77f9505388fb4ff97dfdedf8394eeb46d65a2bda8a"} Sep 30 12:20:29 crc kubenswrapper[5002]: I0930 12:20:29.725305 5002 kubelet_node_status.go:724] "Recording 
Sep 30 12:20:29 crc kubenswrapper[5002]: I0930 12:20:29.725305 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 12:20:29 crc kubenswrapper[5002]: I0930 12:20:29.725325 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 12:20:29 crc kubenswrapper[5002]: I0930 12:20:29.725333 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 12:20:29 crc kubenswrapper[5002]: I0930 12:20:29.725603 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 12:20:29 crc kubenswrapper[5002]: I0930 12:20:29.725628 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 12:20:29 crc kubenswrapper[5002]: I0930 12:20:29.725639 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 12:20:29 crc kubenswrapper[5002]: I0930 12:20:29.848294 5002 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Sep 30 12:20:29 crc kubenswrapper[5002]: I0930 12:20:29.849918 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 12:20:29 crc kubenswrapper[5002]: I0930 12:20:29.849990 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 12:20:29 crc kubenswrapper[5002]: I0930 12:20:29.850033 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 12:20:29 crc kubenswrapper[5002]: I0930 12:20:29.850069 5002 kubelet_node_status.go:76] "Attempting to register node" node="crc"
Sep 30 12:20:29 crc kubenswrapper[5002]: E0930 12:20:29.850733 5002 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.194:6443: connect: connection refused" node="crc"
Sep 30 12:20:30 crc kubenswrapper[5002]: I0930 12:20:30.730230 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"55317dffb5016a5abff9cd21ab18f36efcc0fafffc774e14df49fb36e9d69d08"}
Sep 30 12:20:30 crc kubenswrapper[5002]: I0930 12:20:30.730288 5002 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Sep 30 12:20:30 crc kubenswrapper[5002]: I0930 12:20:30.731466 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 12:20:30 crc kubenswrapper[5002]: I0930 12:20:30.731528 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 12:20:30 crc kubenswrapper[5002]: I0930 12:20:30.731543 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 12:20:30 crc kubenswrapper[5002]: I0930 12:20:30.734693 5002 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="a367821c9b4b8799fb789ca39d487c519925da11ebb80e94a0123e02cf78964e" exitCode=0
Sep 30 12:20:30 crc kubenswrapper[5002]: I0930 12:20:30.734779 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"a367821c9b4b8799fb789ca39d487c519925da11ebb80e94a0123e02cf78964e"}
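
"Attempting to register node" corresponds to the POST against /api/v1/nodes named in the error, retried until it succeeds. A simplified client-go equivalent of that create (a sketch only; the real kubelet also populates labels, taints, and status, and falls back to updating an existing Node object):

package main

import (
	"context"
	"fmt"

	corev1 "k8s.io/api/core/v1"
	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/client-go/kubernetes"
	"k8s.io/client-go/tools/clientcmd"
)

func main() {
	cfg, err := clientcmd.BuildConfigFromFlags("", "/path/to/kubeconfig") // assumed path
	if err != nil {
		panic(err)
	}
	cs := kubernetes.NewForConfigOrDie(cfg)
	// While the apiserver is down this fails exactly like the log's
	// Post "https://api-int.crc.testing:6443/api/v1/nodes" error.
	node := &corev1.Node{ObjectMeta: metav1.ObjectMeta{Name: "crc"}}
	if _, err := cs.CoreV1().Nodes().Create(context.TODO(), node, metav1.CreateOptions{}); err != nil {
		fmt.Println("register failed:", err)
		return
	}
	fmt.Println("node object created")
}
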
Sep 30 12:20:30 crc kubenswrapper[5002]: I0930 12:20:30.734882 5002 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness"
Sep 30 12:20:30 crc kubenswrapper[5002]: I0930 12:20:30.734936 5002 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Sep 30 12:20:30 crc kubenswrapper[5002]: I0930 12:20:30.734948 5002 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Sep 30 12:20:30 crc kubenswrapper[5002]: I0930 12:20:30.734902 5002 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Sep 30 12:20:30 crc kubenswrapper[5002]: I0930 12:20:30.736373 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 12:20:30 crc kubenswrapper[5002]: I0930 12:20:30.736526 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 12:20:30 crc kubenswrapper[5002]: I0930 12:20:30.736571 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 12:20:30 crc kubenswrapper[5002]: I0930 12:20:30.736595 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 12:20:30 crc kubenswrapper[5002]: I0930 12:20:30.736594 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 12:20:30 crc kubenswrapper[5002]: I0930 12:20:30.736735 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 12:20:30 crc kubenswrapper[5002]: I0930 12:20:30.736762 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 12:20:30 crc kubenswrapper[5002]: I0930 12:20:30.737576 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 12:20:30 crc kubenswrapper[5002]: I0930 12:20:30.737634 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 12:20:30 crc kubenswrapper[5002]: I0930 12:20:30.799892 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Sep 30 12:20:30 crc kubenswrapper[5002]: I0930 12:20:30.800296 5002 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Sep 30 12:20:30 crc kubenswrapper[5002]: I0930 12:20:30.804098 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 12:20:30 crc kubenswrapper[5002]: I0930 12:20:30.804148 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 12:20:30 crc kubenswrapper[5002]: I0930 12:20:30.804175 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 12:20:31 crc kubenswrapper[5002]: I0930 12:20:31.184828 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"240fb1d0e11ae7b3459121ba32094ddfa79dc71a1180befebcaf594859a63d86"} Sep 30 12:20:31 crc kubenswrapper[5002]: I0930 12:20:31.742571 5002 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 12:20:31 crc kubenswrapper[5002]: I0930 12:20:31.742598 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"21a70de7ac636795f52b3bdd2a2b938494cabcf80e1fb9b321a309f4719edf4e"} Sep 30 12:20:31 crc kubenswrapper[5002]: I0930 12:20:31.742621 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"ef59c1ec617a61effd91da12779f13866a50fe611d078330f0275e8d6ef1c27e"} Sep 30 12:20:31 crc kubenswrapper[5002]: I0930 12:20:31.742707 5002 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Sep 30 12:20:31 crc kubenswrapper[5002]: I0930 12:20:31.742803 5002 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 12:20:31 crc kubenswrapper[5002]: I0930 12:20:31.743830 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:20:31 crc kubenswrapper[5002]: I0930 12:20:31.743864 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:20:31 crc kubenswrapper[5002]: I0930 12:20:31.743881 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:20:31 crc kubenswrapper[5002]: I0930 12:20:31.745100 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:20:31 crc kubenswrapper[5002]: I0930 12:20:31.745150 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:20:31 crc kubenswrapper[5002]: I0930 12:20:31.745161 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:20:31 crc kubenswrapper[5002]: I0930 12:20:31.898336 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 30 12:20:32 crc kubenswrapper[5002]: I0930 12:20:32.751936 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"4cb1aca4e313653c2d83c0d4fb8dfab1e6f8c8220c806cbcdcf154f2d9ab6e29"} Sep 30 12:20:32 crc kubenswrapper[5002]: I0930 12:20:32.751999 5002 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 12:20:32 crc kubenswrapper[5002]: I0930 12:20:32.752007 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"91de7b5c3b52d03b05c151b66857b5af5f3b9d228f3f1af32a7f504225e1e739"} Sep 30 12:20:32 crc kubenswrapper[5002]: I0930 12:20:32.752225 5002 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 12:20:32 crc kubenswrapper[5002]: I0930 12:20:32.753033 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:20:32 crc 
Sep 30 12:20:32 crc kubenswrapper[5002]: I0930 12:20:32.753067 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 12:20:32 crc kubenswrapper[5002]: I0930 12:20:32.753085 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 12:20:32 crc kubenswrapper[5002]: I0930 12:20:32.754025 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 12:20:32 crc kubenswrapper[5002]: I0930 12:20:32.754081 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 12:20:32 crc kubenswrapper[5002]: I0930 12:20:32.754102 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 12:20:33 crc kubenswrapper[5002]: I0930 12:20:33.010064 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc"
Sep 30 12:20:33 crc kubenswrapper[5002]: I0930 12:20:33.051278 5002 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Sep 30 12:20:33 crc kubenswrapper[5002]: I0930 12:20:33.052817 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 12:20:33 crc kubenswrapper[5002]: I0930 12:20:33.052871 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 12:20:33 crc kubenswrapper[5002]: I0930 12:20:33.052895 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 12:20:33 crc kubenswrapper[5002]: I0930 12:20:33.052980 5002 kubelet_node_status.go:76] "Attempting to register node" node="crc"
Sep 30 12:20:33 crc kubenswrapper[5002]: I0930 12:20:33.541673 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-etcd/etcd-crc"
Sep 30 12:20:33 crc kubenswrapper[5002]: I0930 12:20:33.701272 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc"
Sep 30 12:20:33 crc kubenswrapper[5002]: I0930 12:20:33.754746 5002 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Sep 30 12:20:33 crc kubenswrapper[5002]: I0930 12:20:33.754775 5002 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Sep 30 12:20:33 crc kubenswrapper[5002]: I0930 12:20:33.756312 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 12:20:33 crc kubenswrapper[5002]: I0930 12:20:33.756349 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 12:20:33 crc kubenswrapper[5002]: I0930 12:20:33.756364 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 12:20:33 crc kubenswrapper[5002]: I0930 12:20:33.756383 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 12:20:33 crc kubenswrapper[5002]: I0930 12:20:33.756439 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 12:20:33 crc kubenswrapper[5002]: I0930 12:20:33.756462 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 12:20:33 crc kubenswrapper[5002]: I0930 12:20:33.819094 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc"
Sep 30 12:20:33 crc kubenswrapper[5002]: I0930 12:20:33.819347 5002 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Sep 30 12:20:33 crc kubenswrapper[5002]: I0930 12:20:33.820964 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 12:20:33 crc kubenswrapper[5002]: I0930 12:20:33.820994 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 12:20:33 crc kubenswrapper[5002]: I0930 12:20:33.821005 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 12:20:34 crc kubenswrapper[5002]: I0930 12:20:34.185700 5002 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/cluster-policy-controller namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10357/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" start-of-body=
Sep 30 12:20:34 crc kubenswrapper[5002]: I0930 12:20:34.185843 5002 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="cluster-policy-controller" probeResult="failure" output="Get \"https://192.168.126.11:10357/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)"
Sep 30 12:20:34 crc kubenswrapper[5002]: I0930 12:20:34.758109 5002 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Sep 30 12:20:34 crc kubenswrapper[5002]: I0930 12:20:34.758240 5002 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Sep 30 12:20:34 crc kubenswrapper[5002]: I0930 12:20:34.759557 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 12:20:34 crc kubenswrapper[5002]: I0930 12:20:34.759616 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 12:20:34 crc kubenswrapper[5002]: I0930 12:20:34.759635 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 12:20:34 crc kubenswrapper[5002]: I0930 12:20:34.760548 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 12:20:34 crc kubenswrapper[5002]: I0930 12:20:34.760645 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 12:20:34 crc kubenswrapper[5002]: I0930 12:20:34.760659 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 12:20:34 crc kubenswrapper[5002]: I0930 12:20:34.887635 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Sep 30 12:20:34 crc kubenswrapper[5002]: I0930 12:20:34.887911 5002 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
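
The failing startup probe above is an HTTPS GET against https://192.168.126.11:10357/healthz that must answer within the probe's deadline; while cluster-policy-controller is still starting, the request itself times out. A hand-rolled equivalent in Go (skipping certificate verification is an assumption made for the sketch, since the endpoint's cert is not verified here):

package main

import (
	"crypto/tls"
	"fmt"
	"io"
	"net/http"
	"time"
)

func main() {
	client := &http.Client{
		Timeout: 5 * time.Second, // plays the role of the probe deadline
		Transport: &http.Transport{
			TLSClientConfig: &tls.Config{InsecureSkipVerify: true}, // sketch only
		},
	}
	resp, err := client.Get("https://192.168.126.11:10357/healthz")
	if err != nil {
		// Same failure class as the log: "context deadline exceeded
		// (Client.Timeout exceeded while awaiting headers)".
		fmt.Println("probe failed:", err)
		return
	}
	defer resp.Body.Close()
	body, _ := io.ReadAll(resp.Body)
	fmt.Println("healthz:", resp.Status, string(body))
}
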
event="NodeHasSufficientMemory" Sep 30 12:20:34 crc kubenswrapper[5002]: I0930 12:20:34.889795 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:20:34 crc kubenswrapper[5002]: I0930 12:20:34.889811 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:20:35 crc kubenswrapper[5002]: I0930 12:20:35.826867 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Sep 30 12:20:35 crc kubenswrapper[5002]: I0930 12:20:35.827103 5002 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 12:20:35 crc kubenswrapper[5002]: I0930 12:20:35.828924 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:20:35 crc kubenswrapper[5002]: I0930 12:20:35.829068 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:20:35 crc kubenswrapper[5002]: I0930 12:20:35.829094 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:20:35 crc kubenswrapper[5002]: I0930 12:20:35.835724 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Sep 30 12:20:36 crc kubenswrapper[5002]: E0930 12:20:36.743363 5002 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Sep 30 12:20:36 crc kubenswrapper[5002]: I0930 12:20:36.767762 5002 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 12:20:36 crc kubenswrapper[5002]: I0930 12:20:36.769100 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:20:36 crc kubenswrapper[5002]: I0930 12:20:36.769155 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:20:36 crc kubenswrapper[5002]: I0930 12:20:36.769176 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:20:37 crc kubenswrapper[5002]: I0930 12:20:37.997516 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-etcd/etcd-crc" Sep 30 12:20:37 crc kubenswrapper[5002]: I0930 12:20:37.997723 5002 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 12:20:37 crc kubenswrapper[5002]: I0930 12:20:37.998764 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:20:37 crc kubenswrapper[5002]: I0930 12:20:37.998801 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:20:37 crc kubenswrapper[5002]: I0930 12:20:37.998812 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:20:40 crc kubenswrapper[5002]: W0930 12:20:40.202751 5002 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": net/http: TLS handshake timeout Sep 30 12:20:40 crc kubenswrapper[5002]: I0930 12:20:40.202870 5002 
Sep 30 12:20:40 crc kubenswrapper[5002]: I0930 12:20:40.202870 5002 trace.go:236] Trace[826352974]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (30-Sep-2025 12:20:30.201) (total time: 10001ms):
Sep 30 12:20:40 crc kubenswrapper[5002]: Trace[826352974]: ---"Objects listed" error:Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": net/http: TLS handshake timeout 10001ms (12:20:40.202)
Sep 30 12:20:40 crc kubenswrapper[5002]: Trace[826352974]: [10.001389398s] [10.001389398s] END
Sep 30 12:20:40 crc kubenswrapper[5002]: E0930 12:20:40.202900 5002 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": net/http: TLS handshake timeout" logger="UnhandledError"
Sep 30 12:20:40 crc kubenswrapper[5002]: W0930 12:20:40.227507 5002 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": net/http: TLS handshake timeout
Sep 30 12:20:40 crc kubenswrapper[5002]: I0930 12:20:40.227632 5002 trace.go:236] Trace[973258147]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (30-Sep-2025 12:20:30.225) (total time: 10001ms):
Sep 30 12:20:40 crc kubenswrapper[5002]: Trace[973258147]: ---"Objects listed" error:Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": net/http: TLS handshake timeout 10001ms (12:20:40.227)
Sep 30 12:20:40 crc kubenswrapper[5002]: Trace[973258147]: [10.001962728s] [10.001962728s] END
Sep 30 12:20:40 crc kubenswrapper[5002]: E0930 12:20:40.227665 5002 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": net/http: TLS handshake timeout" logger="UnhandledError"
Sep 30 12:20:40 crc kubenswrapper[5002]: I0930 12:20:40.607701 5002 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": net/http: TLS handshake timeout
Sep 30 12:20:40 crc kubenswrapper[5002]: I0930 12:20:40.655245 5002 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403}
Sep 30 12:20:40 crc kubenswrapper[5002]: I0930 12:20:40.655311 5002 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403"
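
Each Trace above wraps one reflector cycle: an informer performs an initial LIST (the 10-second TLS handshake timeout is charged to the "Objects listed" step) and then WATCHes for changes; the kubelet's Service informer is what issues the /api/v1/services?fieldSelector=spec.clusterIP!=None request. A generic client-go informer sketch, with the field selector omitted and an assumed kubeconfig path:

package main

import (
	"fmt"

	"k8s.io/client-go/informers"
	"k8s.io/client-go/kubernetes"
	"k8s.io/client-go/tools/cache"
	"k8s.io/client-go/tools/clientcmd"
)

func main() {
	cfg, err := clientcmd.BuildConfigFromFlags("", "/path/to/kubeconfig") // assumed path
	if err != nil {
		panic(err)
	}
	cs := kubernetes.NewForConfigOrDie(cfg)
	factory := informers.NewSharedInformerFactory(cs, 0)
	svc := factory.Core().V1().Services().Informer()
	stop := make(chan struct{})
	defer close(stop)
	factory.Start(stop)
	// Blocks until the initial LIST+WATCH succeeds (or stop is closed);
	// the log's "Caches populated for *v1.Service" marks the same point.
	if !cache.WaitForCacheSync(stop, svc.HasSynced) {
		fmt.Println("cache never synced")
		return
	}
	fmt.Println("service cache synced")
}
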
Sep 30 12:20:40 crc kubenswrapper[5002]: I0930 12:20:40.665135 5002 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403}
Sep 30 12:20:40 crc kubenswrapper[5002]: I0930 12:20:40.665239 5002 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403"
Sep 30 12:20:40 crc kubenswrapper[5002]: I0930 12:20:40.790243 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log"
Sep 30 12:20:40 crc kubenswrapper[5002]: I0930 12:20:40.792356 5002 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="55317dffb5016a5abff9cd21ab18f36efcc0fafffc774e14df49fb36e9d69d08" exitCode=255
Sep 30 12:20:40 crc kubenswrapper[5002]: I0930 12:20:40.792394 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"55317dffb5016a5abff9cd21ab18f36efcc0fafffc774e14df49fb36e9d69d08"}
Sep 30 12:20:40 crc kubenswrapper[5002]: I0930 12:20:40.792540 5002 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Sep 30 12:20:40 crc kubenswrapper[5002]: I0930 12:20:40.793225 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 12:20:40 crc kubenswrapper[5002]: I0930 12:20:40.793262 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 12:20:40 crc kubenswrapper[5002]: I0930 12:20:40.793271 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 12:20:40 crc kubenswrapper[5002]: I0930 12:20:40.793793 5002 scope.go:117] "RemoveContainer" containerID="55317dffb5016a5abff9cd21ab18f36efcc0fafffc774e14df49fb36e9d69d08"
Sep 30 12:20:40 crc kubenswrapper[5002]: I0930 12:20:40.805038 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Sep 30 12:20:40 crc kubenswrapper[5002]: I0930 12:20:40.805187 5002 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Sep 30 12:20:40 crc kubenswrapper[5002]: I0930 12:20:40.807200 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 12:20:40 crc kubenswrapper[5002]: I0930 12:20:40.807295 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 12:20:40 crc kubenswrapper[5002]: I0930 12:20:40.807350 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 12:20:41 crc kubenswrapper[5002]: I0930 12:20:41.466575 5002 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc"
Sep 30 12:20:41 crc kubenswrapper[5002]: I0930 12:20:41.796385 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log"
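
The 403s above are not a transport failure: the apiserver is up enough to answer, but the unauthenticated probe is treated as system:anonymous, which at this point has no authorization for /livez. With credentials the same endpoint reports health; a sketch that sends a bearer token read from the conventional in-pod path (both that token path and skipping TLS verification are assumptions for the sketch):

package main

import (
	"crypto/tls"
	"fmt"
	"io"
	"net/http"
	"os"
	"strings"
)

func main() {
	// Conventional in-pod service account token location (assumption).
	token, err := os.ReadFile("/var/run/secrets/kubernetes.io/serviceaccount/token")
	if err != nil {
		panic(err)
	}
	req, err := http.NewRequest("GET", "https://api-int.crc.testing:6443/livez", nil)
	if err != nil {
		panic(err)
	}
	req.Header.Set("Authorization", "Bearer "+strings.TrimSpace(string(token)))
	client := &http.Client{Transport: &http.Transport{
		TLSClientConfig: &tls.Config{InsecureSkipVerify: true}, // sketch only
	}}
	resp, err := client.Do(req)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()
	body, _ := io.ReadAll(resp.Body)
	// 403 for anonymous callers, as in the log; "ok" once authorized.
	fmt.Println(resp.Status, string(body))
}
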
Sep 30 12:20:41 crc kubenswrapper[5002]: I0930 12:20:41.798019 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"49c139b2c0ae558d108d47a8452c2a8348fc164761d468e1b9b98ff2c7407f20"}
Sep 30 12:20:41 crc kubenswrapper[5002]: I0930 12:20:41.798111 5002 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Sep 30 12:20:41 crc kubenswrapper[5002]: I0930 12:20:41.798954 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 12:20:41 crc kubenswrapper[5002]: I0930 12:20:41.798986 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 12:20:41 crc kubenswrapper[5002]: I0930 12:20:41.798998 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 12:20:41 crc kubenswrapper[5002]: I0930 12:20:41.899103 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc"
Sep 30 12:20:42 crc kubenswrapper[5002]: I0930 12:20:42.801326 5002 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Sep 30 12:20:42 crc kubenswrapper[5002]: I0930 12:20:42.802736 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 12:20:42 crc kubenswrapper[5002]: I0930 12:20:42.802788 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 12:20:42 crc kubenswrapper[5002]: I0930 12:20:42.802804 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 12:20:43 crc kubenswrapper[5002]: I0930 12:20:43.019854 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc"
Sep 30 12:20:43 crc kubenswrapper[5002]: I0930 12:20:43.708376 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc"
Sep 30 12:20:43 crc kubenswrapper[5002]: I0930 12:20:43.804009 5002 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Sep 30 12:20:43 crc kubenswrapper[5002]: I0930 12:20:43.805439 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 12:20:43 crc kubenswrapper[5002]: I0930 12:20:43.805525 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 12:20:43 crc kubenswrapper[5002]: I0930 12:20:43.805549 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 12:20:44 crc kubenswrapper[5002]: I0930 12:20:44.109661 5002 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160
Sep 30 12:20:44 crc kubenswrapper[5002]: I0930 12:20:44.185738 5002 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/cluster-policy-controller namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body=
probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="cluster-policy-controller" probeResult="failure" output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Sep 30 12:20:44 crc kubenswrapper[5002]: I0930 12:20:44.805888 5002 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 12:20:44 crc kubenswrapper[5002]: I0930 12:20:44.807438 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:20:44 crc kubenswrapper[5002]: I0930 12:20:44.807517 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:20:44 crc kubenswrapper[5002]: I0930 12:20:44.807536 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:20:45 crc kubenswrapper[5002]: I0930 12:20:45.194301 5002 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160 Sep 30 12:20:45 crc kubenswrapper[5002]: E0930 12:20:45.652694 5002 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": context deadline exceeded" interval="6.4s" Sep 30 12:20:45 crc kubenswrapper[5002]: I0930 12:20:45.655049 5002 trace.go:236] Trace[136672439]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (30-Sep-2025 12:20:33.016) (total time: 12638ms): Sep 30 12:20:45 crc kubenswrapper[5002]: Trace[136672439]: ---"Objects listed" error: 12638ms (12:20:45.654) Sep 30 12:20:45 crc kubenswrapper[5002]: Trace[136672439]: [12.638407865s] [12.638407865s] END Sep 30 12:20:45 crc kubenswrapper[5002]: I0930 12:20:45.655318 5002 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160 Sep 30 12:20:45 crc kubenswrapper[5002]: I0930 12:20:45.655670 5002 trace.go:236] Trace[1839252451]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (30-Sep-2025 12:20:30.921) (total time: 14734ms): Sep 30 12:20:45 crc kubenswrapper[5002]: Trace[1839252451]: ---"Objects listed" error: 14734ms (12:20:45.655) Sep 30 12:20:45 crc kubenswrapper[5002]: Trace[1839252451]: [14.734269297s] [14.734269297s] END Sep 30 12:20:45 crc kubenswrapper[5002]: I0930 12:20:45.655701 5002 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160 Sep 30 12:20:45 crc kubenswrapper[5002]: I0930 12:20:45.656265 5002 reconstruct.go:205] "DevicePaths of reconstructed volumes updated" Sep 30 12:20:45 crc kubenswrapper[5002]: E0930 12:20:45.660534 5002 kubelet_node_status.go:99] "Unable to register node with API server" err="nodes \"crc\" is forbidden: autoscaling.openshift.io/ManagedNode infra config cache not synchronized" node="crc" Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.603147 5002 apiserver.go:52] "Watching apiserver" Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.606379 5002 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66 Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.606790 5002 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openshift-network-node-identity/network-node-identity-vrzqb","openshift-network-operator/iptables-alerter-4ln5h","openshift-network-operator/network-operator-58b4c7f79c-55gtf","openshift-network-console/networking-console-plugin-85b44fc459-gdk6g","openshift-network-diagnostics/network-check-source-55646444c4-trplf","openshift-network-diagnostics/network-check-target-xd92c"] Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.607105 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.607661 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 12:20:46 crc kubenswrapper[5002]: E0930 12:20:46.607799 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.608201 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.608225 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.608538 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Sep 30 12:20:46 crc kubenswrapper[5002]: E0930 12:20:46.609558 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.613697 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 12:20:46 crc kubenswrapper[5002]: E0930 12:20:46.613964 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.620266 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt" Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.620350 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides" Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.621898 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt" Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.621797 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert" Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.620700 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt" Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.622361 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm" Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.622803 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script" Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.623051 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls" Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.623326 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt" Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.651143 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.662298 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.662342 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.663317 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.663359 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.663377 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.672056 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.683931 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.696712 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.707879 5002 desired_state_of_world_populator.go:154] "Finished populating initial desired state of world" Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.713329 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.725095 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.740566 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.753044 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.763792 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.763924 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.764004 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.764079 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.764147 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.764211 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.764304 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.764368 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" 
(UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.764434 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.888279 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.888370 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.888419 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.888513 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.888575 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.888640 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.888721 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.888773 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.888874 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.888942 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.888997 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.889059 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.889174 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.889235 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.889297 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.889357 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.889421 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.889511 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.889613 5002 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.889676 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.889730 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.889798 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.889871 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.889934 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.889999 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.890053 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.890114 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.890178 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") pod 
\"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.890232 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.890295 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.890363 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.890356 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.890437 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.890721 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.890763 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.890797 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.890828 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.890855 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.890899 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Sep 30 
12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.890947 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.891054 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") pod \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\" (UID: \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\") " Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.891110 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.891154 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.891182 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") pod \"49ef4625-1d3a-4a9f-b595-c2433d32326d\" (UID: \"49ef4625-1d3a-4a9f-b595-c2433d32326d\") " Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.891212 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.891274 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.891302 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.891327 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.891360 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") pod 
\"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.891396 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.891424 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.891468 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.891529 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.891563 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.891589 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.891618 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.891652 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.891695 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.891725 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") pod 
\"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.891759 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.891786 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") pod \"44663579-783b-4372-86d6-acf235a62d72\" (UID: \"44663579-783b-4372-86d6-acf235a62d72\") " Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.891821 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.891852 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.891880 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.891911 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.891941 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.891969 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.891997 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.892031 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7c4vf\" (UniqueName: 
\"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.892064 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.892088 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.892118 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.892149 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.892189 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.892233 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.892280 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.892316 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.892340 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.892374 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.892415 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.893801 5002 swap_util.go:74] "error creating dir to test if tmpfs noswap is enabled. Assuming not supported" mount path="" error="stat /var/lib/kubelet/plugins/kubernetes.io/empty-dir: no such file or directory" Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.895696 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.895751 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.895795 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.895830 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.895864 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.895897 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.895930 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.895963 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") pod 
\"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.895992 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.896023 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.896059 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.896090 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.896121 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.896154 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.896185 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.896212 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.896245 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.896279 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: 
\"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.896316 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.896346 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.896379 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.896411 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.896442 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.896660 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.896700 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.896742 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.896779 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.896810 5002 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.896863 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.896892 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.896925 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.896957 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.896985 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.897016 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.897048 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.897079 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.897109 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.897255 5002 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.897287 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.897321 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.897362 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.897391 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.897427 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.897462 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.897516 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.897563 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.897597 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 
12:20:46.897627 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.897660 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.897693 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.897720 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.897752 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.897783 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.897815 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.897843 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.897876 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.897907 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.897936 5002 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.897970 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.898003 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.898033 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.898068 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.898101 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.898133 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.898163 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.898195 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.898227 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 
12:20:46.898256 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") pod \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\" (UID: \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\") " Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.898289 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.898324 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.898352 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.898385 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.898418 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.898451 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.898508 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.898545 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.898577 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: 
\"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.898606 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.898640 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.898676 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.898711 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.898745 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.898780 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.898813 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.898845 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.898879 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.898911 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: 
\"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.898942 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.898976 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.899010 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.899046 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.899075 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.899109 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.899143 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.899173 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.899207 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.899241 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") pod 
\"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.899303 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.899339 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.899373 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.899411 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.899440 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.899491 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.899524 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.899553 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.899592 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.899628 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") pod 
\"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.899717 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.899753 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.899787 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.899839 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.899904 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.899934 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.899968 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.900015 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.900057 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.900091 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.900129 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.887950 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" (OuterVolumeSpecName: "control-plane-machine-set-operator-tls") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "control-plane-machine-set-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.888171 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" (OuterVolumeSpecName: "kube-api-access-d4lsv") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "kube-api-access-d4lsv". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.913171 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.913442 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.913493 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-error". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.913631 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" (OuterVolumeSpecName: "etcd-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.913973 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" (OuterVolumeSpecName: "kube-api-access-7c4vf") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "kube-api-access-7c4vf". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.913960 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" (OuterVolumeSpecName: "kube-api-access-mg5zb") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "kube-api-access-mg5zb". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.913981 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.914003 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" (OuterVolumeSpecName: "node-bootstrap-token") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "node-bootstrap-token". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.890065 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.891502 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" (OuterVolumeSpecName: "config-volume") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.893588 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). 
InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.893759 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" (OuterVolumeSpecName: "client-ca") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.894292 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" (OuterVolumeSpecName: "multus-daemon-config") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "multus-daemon-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.894304 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.894690 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" (OuterVolumeSpecName: "kube-api-access-xcphl") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "kube-api-access-xcphl". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.900619 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.901120 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" (OuterVolumeSpecName: "kube-api-access-2d4wz") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "kube-api-access-2d4wz". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.901581 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.902214 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" (OuterVolumeSpecName: "utilities") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). 
InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.902595 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.902842 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.903332 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.914256 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" (OuterVolumeSpecName: "kube-api-access-zgdk5") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "kube-api-access-zgdk5". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.914267 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.903746 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" (OuterVolumeSpecName: "stats-auth") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "stats-auth". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.904002 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.908741 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" (OuterVolumeSpecName: "kube-api-access-fcqwp") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "kube-api-access-fcqwp". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.909378 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" (OuterVolumeSpecName: "etcd-service-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.909442 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" (OuterVolumeSpecName: "config") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.909757 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.909922 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.910174 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" (OuterVolumeSpecName: "kube-api-access-pjr6v") pod "49ef4625-1d3a-4a9f-b595-c2433d32326d" (UID: "49ef4625-1d3a-4a9f-b595-c2433d32326d"). InnerVolumeSpecName "kube-api-access-pjr6v". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.910197 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" (OuterVolumeSpecName: "kube-api-access-jkwtn") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "kube-api-access-jkwtn". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.910599 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.914369 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" (OuterVolumeSpecName: "kube-api-access-vt5rc") pod "44663579-783b-4372-86d6-acf235a62d72" (UID: "44663579-783b-4372-86d6-acf235a62d72"). 
InnerVolumeSpecName "kube-api-access-vt5rc". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.910608 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.910742 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" (OuterVolumeSpecName: "kube-api-access-xcgwh") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "kube-api-access-xcgwh". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.910958 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" (OuterVolumeSpecName: "kube-api-access-279lb") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "kube-api-access-279lb". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.911086 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" (OuterVolumeSpecName: "kube-api-access-tk88c") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "kube-api-access-tk88c". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.911221 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.911548 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" (OuterVolumeSpecName: "kube-api-access-pcxfs") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "kube-api-access-pcxfs". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.911719 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-script-lib". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.910813 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.912229 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" (OuterVolumeSpecName: "kube-api-access-htfz6") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "kube-api-access-htfz6". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.912725 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" (OuterVolumeSpecName: "signing-cabundle") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-cabundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.914860 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" (OuterVolumeSpecName: "images") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.889672 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" (OuterVolumeSpecName: "config") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.915054 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.915071 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" (OuterVolumeSpecName: "kube-api-access-9xfj7") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "kube-api-access-9xfj7". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.915206 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). 
InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.915398 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" (OuterVolumeSpecName: "cni-sysctl-allowlist") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-sysctl-allowlist". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.915686 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" (OuterVolumeSpecName: "kube-api-access-zkvpv") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "kube-api-access-zkvpv". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.915692 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.915773 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.916424 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.916518 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" (OuterVolumeSpecName: "images") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.916694 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "proxy-ca-bundles". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.916832 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" (OuterVolumeSpecName: "signing-key") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.916870 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.916553 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" (OuterVolumeSpecName: "kube-api-access-jhbk2") pod "bd23aa5c-e532-4e53-bccf-e79f130c5ae8" (UID: "bd23aa5c-e532-4e53-bccf-e79f130c5ae8"). InnerVolumeSpecName "kube-api-access-jhbk2". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 12:20:46 crc kubenswrapper[5002]: E0930 12:20:46.917070 5002 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.917188 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.917379 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:20:46 crc kubenswrapper[5002]: E0930 12:20:46.917707 5002 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Sep 30 12:20:46 crc kubenswrapper[5002]: E0930 12:20:46.917761 5002 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.917799 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "trusted-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.917861 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 12:20:46 crc kubenswrapper[5002]: E0930 12:20:46.918314 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-09-30 12:20:47.418286982 +0000 UTC m=+21.667969168 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.917833 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" (OuterVolumeSpecName: "config") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.918525 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" (OuterVolumeSpecName: "config") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.918794 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.919150 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.919632 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-serving-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.919779 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.920589 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" (OuterVolumeSpecName: "kube-api-access-6g6sz") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "kube-api-access-6g6sz". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.920689 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.920890 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" (OuterVolumeSpecName: "kube-api-access-ngvvp") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "kube-api-access-ngvvp". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.920911 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" (OuterVolumeSpecName: "utilities") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.921226 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" (OuterVolumeSpecName: "kube-api-access-4d4hj") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "kube-api-access-4d4hj". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.921264 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" (OuterVolumeSpecName: "console-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.922096 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" (OuterVolumeSpecName: "kube-api-access-8tdtz") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "kube-api-access-8tdtz". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.922331 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" (OuterVolumeSpecName: "kube-api-access-s4n52") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "kube-api-access-s4n52". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.922542 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" (OuterVolumeSpecName: "kube-api-access-w4xd4") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "kube-api-access-w4xd4". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.922778 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.922985 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.923406 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" (OuterVolumeSpecName: "config") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.923667 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" (OuterVolumeSpecName: "image-registry-operator-tls") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "image-registry-operator-tls". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.924079 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" (OuterVolumeSpecName: "kube-api-access-sb6h7") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "kube-api-access-sb6h7". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.924120 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.924179 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" (OuterVolumeSpecName: "machine-api-operator-tls") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "machine-api-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.924368 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" (OuterVolumeSpecName: "service-ca") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.924451 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" (OuterVolumeSpecName: "config") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.924472 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.924464 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.924747 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.925013 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" (OuterVolumeSpecName: "kube-api-access-wxkg8") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "kube-api-access-wxkg8". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.924888 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.925178 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.925316 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.925349 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.925376 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" (OuterVolumeSpecName: "utilities") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.925783 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" (OuterVolumeSpecName: "kube-api-access-qs4fp") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "kube-api-access-qs4fp". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.925798 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" (OuterVolumeSpecName: "client-ca") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "client-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.925831 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.925906 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" (OuterVolumeSpecName: "kube-api-access-bf2bz") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "kube-api-access-bf2bz". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.925922 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" (OuterVolumeSpecName: "webhook-certs") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "webhook-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.926104 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.926272 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:20:46 crc kubenswrapper[5002]: E0930 12:20:46.926758 5002 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.926847 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" (OuterVolumeSpecName: "cert") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.926910 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" (OuterVolumeSpecName: "serviceca") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "serviceca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.926915 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" (OuterVolumeSpecName: "mcd-auth-proxy-config") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "mcd-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 12:20:46 crc kubenswrapper[5002]: E0930 12:20:46.927019 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-30 12:20:47.426991883 +0000 UTC m=+21.676674159 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.927142 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" (OuterVolumeSpecName: "kube-api-access-rnphk") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "kube-api-access-rnphk". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.927512 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" (OuterVolumeSpecName: "config") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.927608 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.927887 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" (OuterVolumeSpecName: "kube-api-access-2w9zh") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "kube-api-access-2w9zh". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.928006 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "trusted-ca-bundle". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.928229 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" (OuterVolumeSpecName: "config") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.928287 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" (OuterVolumeSpecName: "certs") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:20:46 crc kubenswrapper[5002]: E0930 12:20:46.928354 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 12:20:47.428341411 +0000 UTC m=+21.678023557 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.928404 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.928454 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" (OuterVolumeSpecName: "service-ca") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "service-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.930740 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.931854 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "metrics-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.932745 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/1.log" Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.933694 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.934067 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "ca-trust-extracted". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.937992 5002 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="49c139b2c0ae558d108d47a8452c2a8348fc164761d468e1b9b98ff2c7407f20" exitCode=255 Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.938051 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"49c139b2c0ae558d108d47a8452c2a8348fc164761d468e1b9b98ff2c7407f20"} Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.938127 5002 scope.go:117] "RemoveContainer" containerID="55317dffb5016a5abff9cd21ab18f36efcc0fafffc774e14df49fb36e9d69d08" Sep 30 12:20:46 crc kubenswrapper[5002]: E0930 12:20:46.941301 5002 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Sep 30 12:20:46 crc kubenswrapper[5002]: E0930 12:20:46.941335 5002 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Sep 30 12:20:46 crc kubenswrapper[5002]: E0930 12:20:46.941350 5002 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 30 12:20:46 crc kubenswrapper[5002]: E0930 12:20:46.941426 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-09-30 12:20:47.441403322 +0000 UTC m=+21.691085468 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.948585 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.959590 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.967824 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.975539 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.979099 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.979630 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.980059 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.980213 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.980284 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" (OuterVolumeSpecName: "kube-api-access-x7zkh") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "kube-api-access-x7zkh". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.980776 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.980936 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" (OuterVolumeSpecName: "kube-api-access-nzwt7") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "kube-api-access-nzwt7". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.981092 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" (OuterVolumeSpecName: "kube-api-access-w7l8j") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "kube-api-access-w7l8j". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.980405 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" (OuterVolumeSpecName: "kube-api-access-249nr") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "kube-api-access-249nr". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.981349 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.981713 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.981777 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.981829 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" (OuterVolumeSpecName: "package-server-manager-serving-cert") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "package-server-manager-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.981850 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" (OuterVolumeSpecName: "ovn-control-plane-metrics-cert") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovn-control-plane-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.981861 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" (OuterVolumeSpecName: "default-certificate") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "default-certificate". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.981861 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.981916 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" (OuterVolumeSpecName: "kube-api-access-d6qdx") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "kube-api-access-d6qdx". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.982128 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h"
Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.982295 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 12:20:46 crc kubenswrapper[5002]: E0930 12:20:46.982317 5002 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered
Sep 30 12:20:46 crc kubenswrapper[5002]: E0930 12:20:46.982399 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-30 12:20:47.482378936 +0000 UTC m=+21.732061102 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered
Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.982502 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.982503 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" (OuterVolumeSpecName: "kube-api-access-cfbct") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "kube-api-access-cfbct". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.982546 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" (OuterVolumeSpecName: "kube-api-access-kfwg7") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "kube-api-access-kfwg7". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.982622 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" (OuterVolumeSpecName: "kube-api-access-fqsjt") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "kube-api-access-fqsjt". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.982638 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" (OuterVolumeSpecName: "kube-api-access-w9rds") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "kube-api-access-w9rds". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.981747 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.983608 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" (OuterVolumeSpecName: "config") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.983746 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb"
Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.984719 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.985538 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.986697 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf"
Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.987070 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb"
Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.987205 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf"
Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.987365 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb"
Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.993624 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.996256 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"]
Sep 30 12:20:46 crc kubenswrapper[5002]: I0930 12:20:46.996804 5002 scope.go:117] "RemoveContainer" containerID="49c139b2c0ae558d108d47a8452c2a8348fc164761d468e1b9b98ff2c7407f20"
Sep 30 12:20:46 crc kubenswrapper[5002]: E0930 12:20:46.997208 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-apiserver-check-endpoints\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\"" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792"
Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.001078 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h"
Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.001211 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") on node \"crc\" DevicePath \"\""
Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.001315 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") on node \"crc\" DevicePath \"\""
Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.001383 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h"
Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.001571 5002 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") on node \"crc\" DevicePath \"\""
Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.001601 5002 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") on node \"crc\" DevicePath \"\""
Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.001620 5002 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") on node \"crc\" DevicePath \"\""
Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.001636 5002 reconciler_common.go:293] "Volume detached for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") on node \"crc\" DevicePath \"\""
Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.001653 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") on node \"crc\" DevicePath \"\""
Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.001669 5002 reconciler_common.go:293] "Volume detached for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") on node \"crc\" DevicePath \"\""
Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.001694 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") on node \"crc\" DevicePath \"\""
Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.001707 5002 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") on node \"crc\" DevicePath \"\""
Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.001720 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") on node \"crc\" DevicePath \"\""
Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.001734 5002 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") on node \"crc\" DevicePath \"\""
Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.001747 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") on node \"crc\" DevicePath \"\""
Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.001761 5002 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") on node \"crc\" DevicePath \"\""
Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.001784 5002 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") on node \"crc\" DevicePath \"\""
Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.001798 5002 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") on node \"crc\" DevicePath \"\""
Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.001811 5002 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") on node \"crc\" DevicePath \"\""
Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.001823 5002 reconciler_common.go:293] "Volume detached for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") on node \"crc\" DevicePath \"\""
Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.001835 5002 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") on node \"crc\" DevicePath \"\""
Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.001847 5002 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") on node \"crc\" DevicePath \"\""
Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.001860 5002 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") on node \"crc\" DevicePath \"\""
Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.001879 5002 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") on node \"crc\" DevicePath \"\""
Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.001891 5002 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") on node \"crc\" DevicePath \"\""
Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.001905 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") on node \"crc\" DevicePath \"\""
Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.001918 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") on node \"crc\" DevicePath \"\""
Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.001931 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") on node \"crc\" DevicePath \"\""
Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.001943 5002 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") on node \"crc\" DevicePath \"\""
Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.001955 5002 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\""
Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.001968 5002 reconciler_common.go:293] "Volume detached for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") on node \"crc\" DevicePath \"\""
Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.001981 5002 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") on node \"crc\" DevicePath \"\""
Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.001999 5002 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") on node \"crc\" DevicePath \"\""
Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.002012 5002 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") on node \"crc\" DevicePath \"\""
Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.002025 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") on node \"crc\" DevicePath \"\""
Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.002038 5002 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") on node \"crc\" DevicePath \"\""
Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.002050 5002 reconciler_common.go:293] "Volume detached for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") on node \"crc\" DevicePath \"\""
Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.002062 5002 reconciler_common.go:293] "Volume detached for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") on node \"crc\" DevicePath \"\""
Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.002075 5002 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") on node \"crc\" DevicePath \"\""
Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.002087 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") on node \"crc\" DevicePath \"\""
Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.002098 5002 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") on node \"crc\" DevicePath \"\""
Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.002110 5002 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") on node \"crc\" DevicePath \"\""
Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.002123 5002 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") on node \"crc\" DevicePath \"\""
Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.002135 5002 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") on node \"crc\" DevicePath \"\""
Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.002146 5002 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") on node \"crc\" DevicePath \"\""
Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.002158 5002 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") on node \"crc\" DevicePath \"\""
Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.002170 5002 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") on node \"crc\" DevicePath \"\""
Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.002183 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") on node \"crc\" DevicePath \"\""
Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.002195 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") on node \"crc\" DevicePath \"\""
Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.002208 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") on node \"crc\" DevicePath \"\""
Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.002220 5002 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") on node \"crc\" DevicePath \"\""
Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.002232 5002 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") on node \"crc\" DevicePath \"\""
Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.002243 5002 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") on node \"crc\" DevicePath \"\""
Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.002255 5002 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") on node \"crc\" DevicePath \"\""
Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.002266 5002 reconciler_common.go:293] "Volume detached for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") on node \"crc\" DevicePath \"\""
Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.002278 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") on node \"crc\" DevicePath \"\""
Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.002290 5002 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") on node \"crc\" DevicePath \"\""
Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.002302 5002 reconciler_common.go:293] "Volume detached for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") on node \"crc\" DevicePath \"\""
Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.002314 5002 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") on node \"crc\" DevicePath \"\""
Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.002327 5002 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") on node \"crc\" DevicePath \"\""
Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.002343 5002 reconciler_common.go:293] "Volume detached for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") on node \"crc\" DevicePath \"\""
Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.002377 5002 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") on node \"crc\" DevicePath \"\""
Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.002393 5002 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") on node \"crc\" DevicePath \"\""
Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.002410 5002 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") on node \"crc\" DevicePath \"\""
Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.002430 5002 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") on node \"crc\" DevicePath \"\""
Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.002446 5002 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\""
Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.002463 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") on node \"crc\" DevicePath \"\""
Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.002507 5002 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") on node \"crc\" DevicePath \"\""
Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.002360 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" (OuterVolumeSpecName: "kube-api-access-lz9wn") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "kube-api-access-lz9wn". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.002528 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.002523 5002 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") on node \"crc\" DevicePath \"\""
Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.002583 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") on node \"crc\" DevicePath \"\""
Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.002601 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") on node \"crc\" DevicePath \"\""
Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.002620 5002 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") on node \"crc\" DevicePath \"\""
Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.002638 5002 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") on node \"crc\" DevicePath \"\""
Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.002660 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") on node \"crc\" DevicePath \"\""
Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.002679 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") on node \"crc\" DevicePath \"\""
Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.002701 5002 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") on node \"crc\" DevicePath \"\""
Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.002720 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") on node \"crc\" DevicePath \"\""
Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.002739 5002 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") on node \"crc\" DevicePath \"\""
Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.002758 5002 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") on node \"crc\" DevicePath \"\""
Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.002773 5002 reconciler_common.go:293] "Volume detached for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") on node \"crc\" DevicePath \"\""
Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.002793 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") on node \"crc\" DevicePath \"\""
Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.002807 5002 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") on node \"crc\" DevicePath \"\""
Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.002818 5002 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") on node \"crc\" DevicePath \"\""
Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.002831 5002 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") on node \"crc\" DevicePath \"\""
Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.002844 5002 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") on node \"crc\" DevicePath \"\""
Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.002856 5002 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") on node \"crc\" DevicePath \"\""
Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.002919 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") on node \"crc\" DevicePath \"\""
Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.002940 5002 reconciler_common.go:293] "Volume detached for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") on node \"crc\" DevicePath \"\""
Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.002957 5002 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") on node \"crc\" DevicePath \"\""
Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.002972 5002 reconciler_common.go:293] "Volume detached for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") on node \"crc\" DevicePath \"\""
Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.003024 5002 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") on node \"crc\" DevicePath \"\""
Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.003041 5002 reconciler_common.go:293] "Volume detached for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") on node \"crc\" DevicePath \"\""
Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.003056 5002 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") on node \"crc\" DevicePath \"\""
Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.003074 5002 reconciler_common.go:293] "Volume detached for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") on node \"crc\" DevicePath \"\""
Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.003095 5002 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") on node \"crc\" DevicePath \"\""
Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.003114 5002 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\""
Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.003132 5002 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") on node \"crc\" DevicePath \"\""
Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.003149 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") on node \"crc\" DevicePath \"\""
Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.003167 5002 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") on node \"crc\" DevicePath \"\""
Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.003183 5002 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") on node \"crc\" DevicePath \"\""
Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.003218 5002 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") on node \"crc\" DevicePath \"\""
Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.003234 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") on node \"crc\" DevicePath \"\""
Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.003253 5002 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") on node \"crc\" DevicePath \"\""
Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.003271 5002 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") on node \"crc\" DevicePath \"\""
Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.003287 5002 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") on node \"crc\" DevicePath \"\""
Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.003311 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") on node \"crc\" DevicePath \"\""
Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.003330 5002 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\""
Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.003346 5002 reconciler_common.go:293] "Volume detached for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") on node \"crc\" DevicePath \"\""
Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.003363 5002 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") on node \"crc\" DevicePath \"\""
Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.003379 5002 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") on node \"crc\" DevicePath \"\""
Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.003395 5002 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\""
Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.003413 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") on node \"crc\" DevicePath \"\""
Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.003430 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") on node \"crc\" DevicePath \"\""
Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.003446 5002 reconciler_common.go:293] "Volume detached for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") on node \"crc\" DevicePath \"\""
Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.003463 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") on node \"crc\" DevicePath \"\""
Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.003507 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") on node \"crc\" DevicePath \"\""
Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.003524 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") on node \"crc\" DevicePath \"\""
Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.003540 5002 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\""
Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.003557 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") on node \"crc\" DevicePath \"\""
Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.003575 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") on node \"crc\" DevicePath \"\""
Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.003591 5002 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") on node \"crc\" DevicePath \"\""
Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.003607 5002 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") on node \"crc\" DevicePath \"\""
Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.003623 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") on node \"crc\" DevicePath \"\""
Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.003640 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") on node \"crc\" DevicePath \"\""
Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.003656 5002 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") on node \"crc\" DevicePath \"\""
Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.003673 5002 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") on node \"crc\" DevicePath \"\""
Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.003692 5002 reconciler_common.go:293] "Volume detached for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") on node \"crc\" DevicePath \"\""
Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.003709 5002 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") on node \"crc\" DevicePath \"\""
Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.003725 5002 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") on node \"crc\" DevicePath \"\""
Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.003741 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") on node \"crc\" DevicePath \"\""
Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.003726 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.003759 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") on node \"crc\" DevicePath \"\""
Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.003781 5002 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\""
Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.003798 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") on node \"crc\" DevicePath \"\""
Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.003814 5002 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") on node \"crc\" DevicePath \"\""
Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.003831 5002 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") on node \"crc\" DevicePath \"\""
Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.003849 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") on node \"crc\" DevicePath \"\""
Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.003868 5002 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") on node \"crc\" DevicePath \"\""
Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.003885 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") on node \"crc\" DevicePath \"\""
Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.003907 5002 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") on node \"crc\" DevicePath \"\""
Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.003922 5002 reconciler_common.go:293] "Volume
detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.003938 5002 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") on node \"crc\" DevicePath \"\"" Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.003955 5002 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.003971 5002 reconciler_common.go:293] "Volume detached for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") on node \"crc\" DevicePath \"\"" Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.003988 5002 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") on node \"crc\" DevicePath \"\"" Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.004005 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") on node \"crc\" DevicePath \"\"" Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.004023 5002 reconciler_common.go:293] "Volume detached for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") on node \"crc\" DevicePath \"\"" Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.014205 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.019177 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.019237 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.019310 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.019228 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.019820 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.019943 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.022126 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.036757 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" (OuterVolumeSpecName: "mcc-auth-proxy-config") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "mcc-auth-proxy-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.036762 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.038384 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" (OuterVolumeSpecName: "kube-api-access-qg5z5") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "kube-api-access-qg5z5". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.086288 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.086584 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" (OuterVolumeSpecName: "kube-api-access-lzf88") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "kube-api-access-lzf88". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.086636 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" (OuterVolumeSpecName: "kube-api-access-pj782") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "kube-api-access-pj782". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.086888 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.086916 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.086904 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" (OuterVolumeSpecName: "samples-operator-tls") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "samples-operator-tls". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.086985 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" (OuterVolumeSpecName: "config") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.087196 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" (OuterVolumeSpecName: "kube-api-access-dbsvg") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "kube-api-access-dbsvg". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.087214 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" (OuterVolumeSpecName: "config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.088087 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.088116 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" (OuterVolumeSpecName: "config") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.087592 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" (OuterVolumeSpecName: "kube-api-access-mnrrd") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "kube-api-access-mnrrd". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.087624 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" (OuterVolumeSpecName: "config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.088184 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" (OuterVolumeSpecName: "kube-api-access-v47cf") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "kube-api-access-v47cf". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.087841 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" (OuterVolumeSpecName: "apiservice-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "apiservice-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.087897 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" (OuterVolumeSpecName: "machine-approver-tls") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "machine-approver-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.087941 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" (OuterVolumeSpecName: "audit") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "audit". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.087973 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" (OuterVolumeSpecName: "webhook-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "webhook-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.087988 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.088445 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.089350 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" (OuterVolumeSpecName: "available-featuregates") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "available-featuregates". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.089354 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" (OuterVolumeSpecName: "kube-api-access-x2m85") pod "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" (UID: "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d"). 
InnerVolumeSpecName "kube-api-access-x2m85". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.089426 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.089604 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" (OuterVolumeSpecName: "utilities") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.089782 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.089798 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" (OuterVolumeSpecName: "kube-api-access-6ccd8") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "kube-api-access-6ccd8". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.089843 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.089871 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" (OuterVolumeSpecName: "config") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.089814 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" (OuterVolumeSpecName: "tmpfs") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "tmpfs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.089908 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.089976 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.090053 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" (OuterVolumeSpecName: "config") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.090064 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.090170 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" (OuterVolumeSpecName: "kube-api-access-x4zgh") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "kube-api-access-x4zgh". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.090688 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" (OuterVolumeSpecName: "kube-api-access-gf66m") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "kube-api-access-gf66m". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.092369 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" (OuterVolumeSpecName: "image-import-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "image-import-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.095876 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.101563 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.105503 5002 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.105534 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") on node \"crc\" DevicePath \"\"" Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.105547 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") on node \"crc\" DevicePath \"\"" Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.105559 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") on node \"crc\" DevicePath \"\"" Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.105571 5002 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") on node \"crc\" DevicePath \"\"" Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.105587 5002 reconciler_common.go:293] "Volume detached for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") on node \"crc\" DevicePath \"\"" Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.105603 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") on node \"crc\" DevicePath \"\"" Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.105619 5002 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.105634 5002 reconciler_common.go:293] "Volume detached for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") on node \"crc\" DevicePath \"\"" Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.105650 5002 reconciler_common.go:293] "Volume detached for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") on node \"crc\" DevicePath \"\"" Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.105666 5002 reconciler_common.go:293] "Volume detached for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") on node \"crc\" DevicePath \"\"" Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 
12:20:47.105712 5002 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.105794 5002 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") on node \"crc\" DevicePath \"\"" Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.105830 5002 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") on node \"crc\" DevicePath \"\"" Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.105848 5002 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.105864 5002 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.105874 5002 reconciler_common.go:293] "Volume detached for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") on node \"crc\" DevicePath \"\"" Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.105882 5002 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.105892 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") on node \"crc\" DevicePath \"\"" Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.105902 5002 reconciler_common.go:293] "Volume detached for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") on node \"crc\" DevicePath \"\"" Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.105911 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") on node \"crc\" DevicePath \"\"" Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.105920 5002 reconciler_common.go:293] "Volume detached for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") on node \"crc\" DevicePath \"\"" Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.105929 5002 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.105971 5002 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") on node \"crc\" DevicePath \"\"" Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.105983 5002 reconciler_common.go:293] "Volume detached for volume 
\"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") on node \"crc\" DevicePath \"\"" Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.105994 5002 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.106003 5002 reconciler_common.go:293] "Volume detached for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.106013 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") on node \"crc\" DevicePath \"\"" Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.106023 5002 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.106032 5002 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.106042 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") on node \"crc\" DevicePath \"\"" Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.106051 5002 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.106060 5002 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.106068 5002 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") on node \"crc\" DevicePath \"\"" Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.106077 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") on node \"crc\" DevicePath \"\"" Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.106087 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") on node \"crc\" DevicePath \"\"" Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.106097 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") on node \"crc\" DevicePath \"\"" Sep 30 12:20:47 crc kubenswrapper[5002]: 
I0930 12:20:47.106107 5002 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.106116 5002 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") on node \"crc\" DevicePath \"\"" Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.106132 5002 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") on node \"crc\" DevicePath \"\"" Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.106141 5002 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.106150 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") on node \"crc\" DevicePath \"\"" Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.106161 5002 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") on node \"crc\" DevicePath \"\"" Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.106170 5002 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.106178 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") on node \"crc\" DevicePath \"\"" Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.106187 5002 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") on node \"crc\" DevicePath \"\"" Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.106196 5002 reconciler_common.go:293] "Volume detached for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") on node \"crc\" DevicePath \"\"" Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.106204 5002 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.212763 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.235923 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.248074 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.254747 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.308089 5002 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.387622 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.408821 5002 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 12:20:47 crc kubenswrapper[5002]: W0930 12:20:47.500731 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podef543e1b_8068_4ea3_b32a_61027b32e95d.slice/crio-cc511ac866b63932eecc1e0fc53da99a7b5ef5fa0266875f3085d422d16a7692 WatchSource:0}: Error finding container cc511ac866b63932eecc1e0fc53da99a7b5ef5fa0266875f3085d422d16a7692: Status 404 returned error can't find the container with id cc511ac866b63932eecc1e0fc53da99a7b5ef5fa0266875f3085d422d16a7692 Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.509591 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 12:20:47 crc kubenswrapper[5002]: E0930 12:20:47.509874 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 12:20:48.509820828 +0000 UTC m=+22.759502974 (durationBeforeRetry 1s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.509940 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.509981 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.510018 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 12:20:47 crc kubenswrapper[5002]: E0930 12:20:47.510038 5002 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.510054 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 12:20:47 crc kubenswrapper[5002]: E0930 12:20:47.510095 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-30 12:20:48.510078825 +0000 UTC m=+22.759761031 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Sep 30 12:20:47 crc kubenswrapper[5002]: E0930 12:20:47.510129 5002 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Sep 30 12:20:47 crc kubenswrapper[5002]: E0930 12:20:47.510271 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-30 12:20:48.5102581 +0000 UTC m=+22.759940246 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Sep 30 12:20:47 crc kubenswrapper[5002]: E0930 12:20:47.510166 5002 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Sep 30 12:20:47 crc kubenswrapper[5002]: E0930 12:20:47.510301 5002 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Sep 30 12:20:47 crc kubenswrapper[5002]: E0930 12:20:47.510317 5002 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 30 12:20:47 crc kubenswrapper[5002]: E0930 12:20:47.510354 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-09-30 12:20:48.510344792 +0000 UTC m=+22.760026988 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 30 12:20:47 crc kubenswrapper[5002]: E0930 12:20:47.510202 5002 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Sep 30 12:20:47 crc kubenswrapper[5002]: E0930 12:20:47.510384 5002 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Sep 30 12:20:47 crc kubenswrapper[5002]: E0930 12:20:47.510394 5002 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 30 12:20:47 crc kubenswrapper[5002]: E0930 12:20:47.510418 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-09-30 12:20:48.510412695 +0000 UTC m=+22.760094841 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.941720 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"28d9568103a5257388af7a54e676246ab6ee732abd4516bd06e2cd7471f0ec08"} Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.941765 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"a51800c307f1060ca1725824cbd5a0a1ce6f50ea2a3e461104a6bcac86b504cb"} Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.944118 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/1.log" Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.946625 5002 scope.go:117] "RemoveContainer" containerID="49c139b2c0ae558d108d47a8452c2a8348fc164761d468e1b9b98ff2c7407f20" Sep 30 12:20:47 crc kubenswrapper[5002]: E0930 12:20:47.946748 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-apiserver-check-endpoints\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\"" 
pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.949203 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"30f55f9a0bcab13420179c62f63cd5e785afe964ac86286450fcd91d7ca0562e"} Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.949228 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"306dd7840789c5fe2565d819a744a57e330cb1fcc1ab1ca4b3c5ebcfd0361d3e"} Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.949239 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"cc511ac866b63932eecc1e0fc53da99a7b5ef5fa0266875f3085d422d16a7692"} Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.950230 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"46fa96f7e8d1596f4e9d237b7814011b8907c5bacef40ffe167c195489f0154f"} Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.962862 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://28d9568103a5257388af7a54e676246ab6ee732abd4516bd06e2cd7471f0ec08\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call 
webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.971315 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.982285 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook 
approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 30 12:20:47 crc kubenswrapper[5002]: I0930 12:20:47.992002 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 30 12:20:48 crc kubenswrapper[5002]: I0930 12:20:48.002443 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 30 12:20:48 crc kubenswrapper[5002]: I0930 12:20:48.014199 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1fe2fccc-790d-41b7-a322-0f99dbd9b3e2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a46e01897629261ad42fc3fa2cc7fdf56a2a020864a1088d9e58edf61aac296e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://017ebbf00a59d33f36e3515dd280bc0c4363da8b3e621339185ebdb3c8728148\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ee0dc371a43f3780e1178e29ac491c7a61786fccb37527dd2f21d800adbbdae\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://49c139b2c0ae558d108d47a8452c2a8348fc164761d468e1b9b98ff2c7407f20\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://55317dffb5016a5abff9cd21ab18f36efcc0fafffc774e14df49fb36e9d69d08\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T12:20:40Z\\\",\\\"message\\\":\\\"W0930 12:20:29.874243 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0930 
12:20:29.874659 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759234829 cert, and key in /tmp/serving-cert-1124809756/serving-signer.crt, /tmp/serving-cert-1124809756/serving-signer.key\\\\nI0930 12:20:30.003657 1 observer_polling.go:159] Starting file observer\\\\nW0930 12:20:30.011726 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0930 12:20:30.014377 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 12:20:30.020178 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1124809756/tls.crt::/tmp/serving-cert-1124809756/tls.key\\\\\\\"\\\\nF0930 12:20:40.535827 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:29Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://49c139b2c0ae558d108d47a8452c2a8348fc164761d468e1b9b98ff2c7407f20\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"message\\\":\\\"le observer\\\\nW0930 12:20:46.369838 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0930 12:20:46.370037 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 12:20:46.371113 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1995022772/tls.crt::/tmp/serving-cert-1995022772/tls.key\\\\\\\"\\\\nI0930 12:20:46.549991 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0930 12:20:46.558658 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0930 12:20:46.558692 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0930 12:20:46.558723 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0930 12:20:46.558735 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0930 12:20:46.572632 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0930 12:20:46.572689 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 12:20:46.572707 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 12:20:46.572718 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0930 12:20:46.572725 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0930 12:20:46.572737 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0930 12:20:46.572744 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0930 
12:20:46.573045 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0930 12:20:46.574462 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b99e73bb34b117cdb12e2475fc8e7d0ffdcb7fc44a000282e7776473eca209ea\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://24b1bc36a80f3925846e76f49c3347f1e2f14555096b3a514b06c6eb2e8fe02c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://24b1bc36a80f3925846e76f49c3347f1e2f14555096b3a514b06c6eb2e8fe02c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 30 12:20:48 crc kubenswrapper[5002]: I0930 12:20:48.023612 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 30 12:20:48 crc kubenswrapper[5002]: I0930 12:20:48.024519 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-etcd/etcd-crc" Sep 30 12:20:48 crc kubenswrapper[5002]: I0930 12:20:48.034282 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 30 12:20:48 crc kubenswrapper[5002]: I0930 12:20:48.036027 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-etcd/etcd-crc" Sep 30 12:20:48 crc kubenswrapper[5002]: I0930 12:20:48.039527 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd/etcd-crc"] Sep 30 12:20:48 crc kubenswrapper[5002]: I0930 12:20:48.044971 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1fe2fccc-790d-41b7-a322-0f99dbd9b3e2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a46e01897629261ad42fc3fa2cc7fdf56a2a020864a1088d9e58edf61aac296e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://017ebbf00a59d33f36e3515dd280bc0c4363da8b3e621339185ebdb3c8728148\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ee0dc371a43f3780e1178e29ac491c7a61786fccb37527dd2f21d800adbbdae\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://49c139b2c0ae558d108d47a8452c2a8348fc164761d468e1b9b98ff2c7407f20\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://49c139b2c0ae558d108d47a8452c2a8348fc164761d468e1b9b98ff2c7407f20\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"message\\\":\\\"le observer\\\\nW0930 12:20:46.369838 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0930 12:20:46.370037 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 12:20:46.371113 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1995022772/tls.crt::/tmp/serving-cert-1995022772/tls.key\\\\\\\"\\\\nI0930 12:20:46.549991 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0930 12:20:46.558658 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0930 12:20:46.558692 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0930 12:20:46.558723 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0930 12:20:46.558735 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0930 12:20:46.572632 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0930 12:20:46.572689 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 12:20:46.572707 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 12:20:46.572718 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0930 12:20:46.572725 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0930 12:20:46.572737 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0930 12:20:46.572744 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0930 12:20:46.573045 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0930 12:20:46.574462 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:40Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b99e73bb34b117cdb12e2475fc8e7d0ffdcb7fc44a000282e7776473eca209ea\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://24b1bc36a80f3925846e76f49c3347f1e2f14555096b3a514b06c6eb2e8fe02c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://24b1bc36a80f3925846e76f49c3347f1e2f14555096b3a514b06c6eb2e8fe02c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 30 12:20:48 crc kubenswrapper[5002]: I0930 12:20:48.059536 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://28d9568103a5257388af7a54e676246ab6ee732abd4516bd06e2cd7471f0ec08\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 30 12:20:48 crc kubenswrapper[5002]: I0930 12:20:48.068351 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 30 12:20:48 crc kubenswrapper[5002]: I0930 12:20:48.078758 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://30f55f9a0bcab13420179c62f63cd5e785afe964ac86286450fcd91d7ca0562e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://306dd7840789c5fe2565d819a744a57e330cb1fcc1ab1ca4b3c5ebcfd0361d3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"moun
tPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 30 12:20:48 crc kubenswrapper[5002]: I0930 12:20:48.087723 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 30 12:20:48 crc kubenswrapper[5002]: I0930 12:20:48.098315 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 30 12:20:48 crc kubenswrapper[5002]: I0930 12:20:48.122041 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1d4eefd-8292-45a1-af4b-4a4906cccd2f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21a70de7ac636795f52b3bdd2a2b938494cabcf80e1fb9b321a309f4719edf4e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://240fb1d0e11ae7b3459121ba32094ddfa79dc71a1180befebcaf594859a63d86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025
-09-30T12:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://91de7b5c3b52d03b05c151b66857b5af5f3b9d228f3f1af32a7f504225e1e739\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4cb1aca4e313653c2d83c0d4fb8dfab1e6f8c8220c806cbcdcf154f2d9ab6e29\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ef59c1ec617a61effd91da12779f13866a50fe611d078330f0275e8d6ef1c27e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3a814062d9554255b768b26ad452946c64836707ac0e68c9e3f107dc587c0ccb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3a814062d9554255b768b26ad452946c64836707ac0e68c9e3f107dc587c
0ccb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://74db2c969d2f9049fd8e8130a9fd50b312f3fce049ab0e814390dbff3b0cb596\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://74db2c969d2f9049fd8e8130a9fd50b312f3fce049ab0e814390dbff3b0cb596\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://a367821c9b4b8799fb789ca39d487c519925da11ebb80e94a0123e02cf78964e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a367821c9b4b8799fb789ca39d487c519925da11ebb80e94a0123e02cf78964e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:26Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 30 12:20:48 crc kubenswrapper[5002]: I0930 12:20:48.131892 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1fe2fccc-790d-41b7-a322-0f99dbd9b3e2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a46e01897629261ad42fc3fa2cc7fdf56a2a020864a1088d9e58edf61aac296e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://017ebbf00a59d33f36e3515dd280bc0c4363da8b3e621339185ebdb3c8728148\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ee0dc371a43f3780e1178e29ac491c7a61786fccb37527dd2f21d800adbbdae\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://49c139b2c0ae558d108d47a8452c2a8348fc164761d468e1b9b98ff2c7407f20\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\
\":\\\"cri-o://49c139b2c0ae558d108d47a8452c2a8348fc164761d468e1b9b98ff2c7407f20\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"message\\\":\\\"le observer\\\\nW0930 12:20:46.369838 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0930 12:20:46.370037 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 12:20:46.371113 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1995022772/tls.crt::/tmp/serving-cert-1995022772/tls.key\\\\\\\"\\\\nI0930 12:20:46.549991 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0930 12:20:46.558658 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0930 12:20:46.558692 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0930 12:20:46.558723 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0930 12:20:46.558735 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0930 12:20:46.572632 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0930 12:20:46.572689 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 12:20:46.572707 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 12:20:46.572718 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0930 12:20:46.572725 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0930 12:20:46.572737 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0930 12:20:46.572744 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0930 12:20:46.573045 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0930 12:20:46.574462 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:40Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b99e73bb34b117cdb12e2475fc8e7d0ffdcb7fc44a000282e7776473eca209ea\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://24b1bc36a80f3925846e76f49c3347f1e2f14555096b3a514b06c6eb2e8fe02c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://24b1bc36a80f3925846e76f49c3347f1e2f14555096b3a514b06c6eb2e8fe02c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 30 12:20:48 crc kubenswrapper[5002]: I0930 12:20:48.141188 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://28d9568103a5257388af7a54e676246ab6ee732abd4516bd06e2cd7471f0ec08\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 30 12:20:48 crc kubenswrapper[5002]: I0930 12:20:48.150567 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 30 12:20:48 crc kubenswrapper[5002]: I0930 12:20:48.170667 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://30f55f9a0bcab13420179c62f63cd5e785afe964ac86286450fcd91d7ca0562e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://306dd7840789c5fe2565d819a744a57e330cb1fcc1ab1ca4b3c5ebcfd0361d3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"moun
tPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 30 12:20:48 crc kubenswrapper[5002]: I0930 12:20:48.187259 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 30 12:20:48 crc kubenswrapper[5002]: I0930 12:20:48.202405 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 30 12:20:48 crc kubenswrapper[5002]: I0930 12:20:48.213970 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 30 12:20:48 crc kubenswrapper[5002]: I0930 12:20:48.517382 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 12:20:48 crc kubenswrapper[5002]: I0930 12:20:48.517555 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 12:20:48 crc kubenswrapper[5002]: I0930 12:20:48.517623 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 12:20:48 crc kubenswrapper[5002]: I0930 12:20:48.517675 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 12:20:48 crc kubenswrapper[5002]: I0930 12:20:48.517726 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 12:20:48 crc kubenswrapper[5002]: E0930 12:20:48.517817 5002 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Sep 30 12:20:48 crc kubenswrapper[5002]: E0930 12:20:48.517860 5002 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Sep 30 12:20:48 crc kubenswrapper[5002]: E0930 12:20:48.517878 5002 projected.go:288] Couldn't 
get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Sep 30 12:20:48 crc kubenswrapper[5002]: E0930 12:20:48.517885 5002 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 30 12:20:48 crc kubenswrapper[5002]: E0930 12:20:48.517907 5002 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Sep 30 12:20:48 crc kubenswrapper[5002]: E0930 12:20:48.517927 5002 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 30 12:20:48 crc kubenswrapper[5002]: E0930 12:20:48.517980 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-09-30 12:20:50.517948527 +0000 UTC m=+24.767630713 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 30 12:20:48 crc kubenswrapper[5002]: E0930 12:20:48.518015 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-09-30 12:20:50.517999388 +0000 UTC m=+24.767681564 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 30 12:20:48 crc kubenswrapper[5002]: E0930 12:20:48.518049 5002 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Sep 30 12:20:48 crc kubenswrapper[5002]: E0930 12:20:48.518108 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-30 12:20:50.518086521 +0000 UTC m=+24.767768707 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Sep 30 12:20:48 crc kubenswrapper[5002]: E0930 12:20:48.518170 5002 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Sep 30 12:20:48 crc kubenswrapper[5002]: E0930 12:20:48.518217 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-30 12:20:50.518201064 +0000 UTC m=+24.767883250 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Sep 30 12:20:48 crc kubenswrapper[5002]: E0930 12:20:48.518693 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 12:20:50.518667108 +0000 UTC m=+24.768349294 (durationBeforeRetry 2s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 12:20:48 crc kubenswrapper[5002]: I0930 12:20:48.676009 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 12:20:48 crc kubenswrapper[5002]: I0930 12:20:48.676516 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 12:20:48 crc kubenswrapper[5002]: E0930 12:20:48.676606 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 12:20:48 crc kubenswrapper[5002]: I0930 12:20:48.676940 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 12:20:48 crc kubenswrapper[5002]: E0930 12:20:48.677326 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 12:20:48 crc kubenswrapper[5002]: E0930 12:20:48.677008 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 12:20:48 crc kubenswrapper[5002]: I0930 12:20:48.679800 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="01ab3dd5-8196-46d0-ad33-122e2ca51def" path="/var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes" Sep 30 12:20:48 crc kubenswrapper[5002]: I0930 12:20:48.680542 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" path="/var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes" Sep 30 12:20:48 crc kubenswrapper[5002]: I0930 12:20:48.682343 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09efc573-dbb6-4249-bd59-9b87aba8dd28" path="/var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes" Sep 30 12:20:48 crc kubenswrapper[5002]: I0930 12:20:48.683120 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b574797-001e-440a-8f4e-c0be86edad0f" path="/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes" Sep 30 12:20:48 crc kubenswrapper[5002]: I0930 12:20:48.684189 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b78653f-4ff9-4508-8672-245ed9b561e3" path="/var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes" Sep 30 12:20:48 crc kubenswrapper[5002]: I0930 12:20:48.684672 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1386a44e-36a2-460c-96d0-0359d2b6f0f5" path="/var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes" Sep 30 12:20:48 crc kubenswrapper[5002]: I0930 12:20:48.685218 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1bf7eb37-55a3-4c65-b768-a94c82151e69" path="/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes" Sep 30 12:20:48 crc kubenswrapper[5002]: I0930 12:20:48.686169 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1d611f23-29be-4491-8495-bee1670e935f" path="/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes" Sep 30 12:20:48 crc kubenswrapper[5002]: I0930 12:20:48.686846 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="20b0d48f-5fd6-431c-a545-e3c800c7b866" path="/var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/volumes" Sep 30 12:20:48 crc kubenswrapper[5002]: I0930 12:20:48.687785 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" path="/var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes" 
Sep 30 12:20:48 crc kubenswrapper[5002]: I0930 12:20:48.688301 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="22c825df-677d-4ca6-82db-3454ed06e783" path="/var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes" Sep 30 12:20:48 crc kubenswrapper[5002]: I0930 12:20:48.689350 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="25e176fe-21b4-4974-b1ed-c8b94f112a7f" path="/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes" Sep 30 12:20:48 crc kubenswrapper[5002]: I0930 12:20:48.689836 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" path="/var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes" Sep 30 12:20:48 crc kubenswrapper[5002]: I0930 12:20:48.690372 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="31d8b7a1-420e-4252-a5b7-eebe8a111292" path="/var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes" Sep 30 12:20:48 crc kubenswrapper[5002]: I0930 12:20:48.691239 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3ab1a177-2de0-46d9-b765-d0d0649bb42e" path="/var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/volumes" Sep 30 12:20:48 crc kubenswrapper[5002]: I0930 12:20:48.691787 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" path="/var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes" Sep 30 12:20:48 crc kubenswrapper[5002]: I0930 12:20:48.692733 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="43509403-f426-496e-be36-56cef71462f5" path="/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes" Sep 30 12:20:48 crc kubenswrapper[5002]: I0930 12:20:48.693133 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="44663579-783b-4372-86d6-acf235a62d72" path="/var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/volumes" Sep 30 12:20:48 crc kubenswrapper[5002]: I0930 12:20:48.693694 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="496e6271-fb68-4057-954e-a0d97a4afa3f" path="/var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes" Sep 30 12:20:48 crc kubenswrapper[5002]: I0930 12:20:48.694725 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" path="/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes" Sep 30 12:20:48 crc kubenswrapper[5002]: I0930 12:20:48.695210 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49ef4625-1d3a-4a9f-b595-c2433d32326d" path="/var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/volumes" Sep 30 12:20:48 crc kubenswrapper[5002]: I0930 12:20:48.696212 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4bb40260-dbaa-4fb0-84df-5e680505d512" path="/var/lib/kubelet/pods/4bb40260-dbaa-4fb0-84df-5e680505d512/volumes" Sep 30 12:20:48 crc kubenswrapper[5002]: I0930 12:20:48.696686 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5225d0e4-402f-4861-b410-819f433b1803" path="/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes" Sep 30 12:20:48 crc kubenswrapper[5002]: I0930 12:20:48.697672 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5441d097-087c-4d9a-baa8-b210afa90fc9" path="/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes" Sep 30 12:20:48 crc kubenswrapper[5002]: I0930 
12:20:48.698109 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="57a731c4-ef35-47a8-b875-bfb08a7f8011" path="/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes" Sep 30 12:20:48 crc kubenswrapper[5002]: I0930 12:20:48.698817 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5b88f790-22fa-440e-b583-365168c0b23d" path="/var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/volumes" Sep 30 12:20:48 crc kubenswrapper[5002]: I0930 12:20:48.699872 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5fe579f8-e8a6-4643-bce5-a661393c4dde" path="/var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/volumes" Sep 30 12:20:48 crc kubenswrapper[5002]: I0930 12:20:48.700338 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6402fda4-df10-493c-b4e5-d0569419652d" path="/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes" Sep 30 12:20:48 crc kubenswrapper[5002]: I0930 12:20:48.701247 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6509e943-70c6-444c-bc41-48a544e36fbd" path="/var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes" Sep 30 12:20:48 crc kubenswrapper[5002]: I0930 12:20:48.701745 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6731426b-95fe-49ff-bb5f-40441049fde2" path="/var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/volumes" Sep 30 12:20:48 crc kubenswrapper[5002]: I0930 12:20:48.702554 5002 kubelet_volumes.go:152] "Cleaned up orphaned volume subpath from pod" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volume-subpaths/run-systemd/ovnkube-controller/6" Sep 30 12:20:48 crc kubenswrapper[5002]: I0930 12:20:48.702654 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volumes" Sep 30 12:20:48 crc kubenswrapper[5002]: I0930 12:20:48.704214 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7539238d-5fe0-46ed-884e-1c3b566537ec" path="/var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes" Sep 30 12:20:48 crc kubenswrapper[5002]: I0930 12:20:48.705089 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7583ce53-e0fe-4a16-9e4d-50516596a136" path="/var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes" Sep 30 12:20:48 crc kubenswrapper[5002]: I0930 12:20:48.705528 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7bb08738-c794-4ee8-9972-3a62ca171029" path="/var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes" Sep 30 12:20:48 crc kubenswrapper[5002]: I0930 12:20:48.706981 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="87cf06ed-a83f-41a7-828d-70653580a8cb" path="/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes" Sep 30 12:20:48 crc kubenswrapper[5002]: I0930 12:20:48.707754 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" path="/var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes" Sep 30 12:20:48 crc kubenswrapper[5002]: I0930 12:20:48.708675 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="925f1c65-6136-48ba-85aa-3a3b50560753" path="/var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes" Sep 30 12:20:48 crc kubenswrapper[5002]: I0930 
12:20:48.709265 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" path="/var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/volumes" Sep 30 12:20:48 crc kubenswrapper[5002]: I0930 12:20:48.710393 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9d4552c7-cd75-42dd-8880-30dd377c49a4" path="/var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes" Sep 30 12:20:48 crc kubenswrapper[5002]: I0930 12:20:48.710891 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" path="/var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/volumes" Sep 30 12:20:48 crc kubenswrapper[5002]: I0930 12:20:48.711954 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a31745f5-9847-4afe-82a5-3161cc66ca93" path="/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes" Sep 30 12:20:48 crc kubenswrapper[5002]: I0930 12:20:48.712586 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" path="/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes" Sep 30 12:20:48 crc kubenswrapper[5002]: I0930 12:20:48.713598 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6312bbd-5731-4ea0-a20f-81d5a57df44a" path="/var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/volumes" Sep 30 12:20:48 crc kubenswrapper[5002]: I0930 12:20:48.714244 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" path="/var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes" Sep 30 12:20:48 crc kubenswrapper[5002]: I0930 12:20:48.715324 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" path="/var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes" Sep 30 12:20:48 crc kubenswrapper[5002]: I0930 12:20:48.715871 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" path="/var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/volumes" Sep 30 12:20:48 crc kubenswrapper[5002]: I0930 12:20:48.717198 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bf126b07-da06-4140-9a57-dfd54fc6b486" path="/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes" Sep 30 12:20:48 crc kubenswrapper[5002]: I0930 12:20:48.717776 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c03ee662-fb2f-4fc4-a2c1-af487c19d254" path="/var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes" Sep 30 12:20:48 crc kubenswrapper[5002]: I0930 12:20:48.718850 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" path="/var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/volumes" Sep 30 12:20:48 crc kubenswrapper[5002]: I0930 12:20:48.719328 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e7e6199b-1264-4501-8953-767f51328d08" path="/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes" Sep 30 12:20:48 crc kubenswrapper[5002]: I0930 12:20:48.720375 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="efdd0498-1daa-4136-9a4a-3b948c2293fc" path="/var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/volumes" Sep 30 12:20:48 crc kubenswrapper[5002]: I0930 12:20:48.720996 5002 kubelet_volumes.go:163] "Cleaned 
up orphaned pod volumes dir" podUID="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" path="/var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/volumes" Sep 30 12:20:48 crc kubenswrapper[5002]: I0930 12:20:48.721471 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fda69060-fa79-4696-b1a6-7980f124bf7c" path="/var/lib/kubelet/pods/fda69060-fa79-4696-b1a6-7980f124bf7c/volumes" Sep 30 12:20:49 crc kubenswrapper[5002]: I0930 12:20:49.756453 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/node-resolver-p45pn"] Sep 30 12:20:49 crc kubenswrapper[5002]: I0930 12:20:49.756742 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/node-resolver-p45pn" Sep 30 12:20:49 crc kubenswrapper[5002]: I0930 12:20:49.758940 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt" Sep 30 12:20:49 crc kubenswrapper[5002]: I0930 12:20:49.759360 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7" Sep 30 12:20:49 crc kubenswrapper[5002]: I0930 12:20:49.759768 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt" Sep 30 12:20:49 crc kubenswrapper[5002]: I0930 12:20:49.774535 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:49Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:49 crc kubenswrapper[5002]: I0930 12:20:49.789128 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-p45pn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d0334cb8-cf38-4e50-80e8-2b81d8d46ae7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:49Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:49Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2nvm4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:49Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-p45pn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:49Z is 
after 2025-08-24T17:21:41Z" Sep 30 12:20:49 crc kubenswrapper[5002]: I0930 12:20:49.810734 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1d4eefd-8292-45a1-af4b-4a4906cccd2f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21a70de7ac636795f52b3bdd2a2b938494cabcf80e1fb9b321a309f4719edf4e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://240fb1d0e11ae7b3459121ba32094ddfa79dc71a1180befebcaf594859a63d86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://91de7b5c3b52d03b05c151b66857b5af5f3b9d228f3f1af32a7f504225e1e739\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/va
r/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4cb1aca4e313653c2d83c0d4fb8dfab1e6f8c8220c806cbcdcf154f2d9ab6e29\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ef59c1ec617a61effd91da12779f13866a50fe611d078330f0275e8d6ef1c27e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3a814062d9554255b768b26ad452946c64836707ac0e68c9e3f107dc587c0ccb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3a814062d9554255b768b26ad452946c64836707ac0e68c9e3f107dc587c0ccb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://74db2c969d2f9049fd8e8130a9fd50b312f3fce049ab0e814390dbff3b0cb596\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://74db2c969d2f9049fd8e8130a9fd50b312f3fce049ab0e814390dbff3b0cb596\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\
\"2025-09-30T12:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://a367821c9b4b8799fb789ca39d487c519925da11ebb80e94a0123e02cf78964e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a367821c9b4b8799fb789ca39d487c519925da11ebb80e94a0123e02cf78964e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:26Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:49Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:49 crc kubenswrapper[5002]: I0930 12:20:49.828518 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/d0334cb8-cf38-4e50-80e8-2b81d8d46ae7-hosts-file\") pod \"node-resolver-p45pn\" (UID: \"d0334cb8-cf38-4e50-80e8-2b81d8d46ae7\") " pod="openshift-dns/node-resolver-p45pn" Sep 30 12:20:49 crc kubenswrapper[5002]: I0930 12:20:49.828786 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2nvm4\" (UniqueName: \"kubernetes.io/projected/d0334cb8-cf38-4e50-80e8-2b81d8d46ae7-kube-api-access-2nvm4\") pod \"node-resolver-p45pn\" (UID: \"d0334cb8-cf38-4e50-80e8-2b81d8d46ae7\") " pod="openshift-dns/node-resolver-p45pn" Sep 30 12:20:49 crc kubenswrapper[5002]: I0930 12:20:49.839167 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1fe2fccc-790d-41b7-a322-0f99dbd9b3e2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a46e01897629261ad42fc3fa2cc7fdf56a2a020864a1088d9e58edf61aac296e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://017ebbf00a59d33f36e3515dd280bc0c4363da8b3e621339185ebdb3c8728148\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ee0dc371a43f3780e1178e29ac491c7a61786fccb37527dd2f21d800adbbdae\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://49c139b2c0ae558d108d47a8452c2a8348fc164761d468e1b9b98ff2c7407f20\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://49c139b2c0ae558d108d47a8452c2a8348fc164761d468e1b9b98ff2c7407f20\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"message\\\":\\\"le observer\\\\nW0930 12:20:46.369838 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0930 12:20:46.370037 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 12:20:46.371113 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1995022772/tls.crt::/tmp/serving-cert-1995022772/tls.key\\\\\\\"\\\\nI0930 12:20:46.549991 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0930 12:20:46.558658 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0930 12:20:46.558692 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0930 12:20:46.558723 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0930 12:20:46.558735 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0930 12:20:46.572632 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0930 12:20:46.572689 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 12:20:46.572707 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 12:20:46.572718 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0930 12:20:46.572725 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0930 12:20:46.572737 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0930 12:20:46.572744 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0930 12:20:46.573045 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0930 12:20:46.574462 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:40Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b99e73bb34b117cdb12e2475fc8e7d0ffdcb7fc44a000282e7776473eca209ea\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://24b1bc36a80f3925846e76f49c3347f1e2f14555096b3a514b06c6eb2e8fe02c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://24b1bc36a80f3925846e76f49c3347f1e2f14555096b3a514b06c6eb2e8fe02c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:49Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:49 crc kubenswrapper[5002]: I0930 12:20:49.854445 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://28d9568103a5257388af7a54e676246ab6ee732abd4516bd06e2cd7471f0ec08\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:49Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:49 crc kubenswrapper[5002]: I0930 12:20:49.872111 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:49Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:49 crc kubenswrapper[5002]: I0930 12:20:49.888870 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://30f55f9a0bcab13420179c62f63cd5e785afe964ac86286450fcd91d7ca0562e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://306dd7840789c5fe2565d819a744a57e330cb1fcc1ab1ca4b3c5ebcfd0361d3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:47Z\\\"}},\\\"volumeMount
s\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:49Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:49 crc kubenswrapper[5002]: I0930 12:20:49.922452 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:49Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:49 crc kubenswrapper[5002]: I0930 12:20:49.929668 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/d0334cb8-cf38-4e50-80e8-2b81d8d46ae7-hosts-file\") pod \"node-resolver-p45pn\" (UID: \"d0334cb8-cf38-4e50-80e8-2b81d8d46ae7\") " pod="openshift-dns/node-resolver-p45pn" Sep 30 12:20:49 crc kubenswrapper[5002]: I0930 12:20:49.929761 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2nvm4\" (UniqueName: \"kubernetes.io/projected/d0334cb8-cf38-4e50-80e8-2b81d8d46ae7-kube-api-access-2nvm4\") pod \"node-resolver-p45pn\" (UID: \"d0334cb8-cf38-4e50-80e8-2b81d8d46ae7\") " pod="openshift-dns/node-resolver-p45pn" Sep 30 12:20:49 crc kubenswrapper[5002]: I0930 12:20:49.930200 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/d0334cb8-cf38-4e50-80e8-2b81d8d46ae7-hosts-file\") pod \"node-resolver-p45pn\" (UID: \"d0334cb8-cf38-4e50-80e8-2b81d8d46ae7\") " pod="openshift-dns/node-resolver-p45pn" Sep 30 12:20:49 crc kubenswrapper[5002]: I0930 12:20:49.964521 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2nvm4\" (UniqueName: \"kubernetes.io/projected/d0334cb8-cf38-4e50-80e8-2b81d8d46ae7-kube-api-access-2nvm4\") pod \"node-resolver-p45pn\" (UID: \"d0334cb8-cf38-4e50-80e8-2b81d8d46ae7\") " pod="openshift-dns/node-resolver-p45pn" Sep 30 12:20:49 crc kubenswrapper[5002]: I0930 12:20:49.969078 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:49Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:50 crc kubenswrapper[5002]: I0930 12:20:50.067693 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/node-resolver-p45pn" Sep 30 12:20:50 crc kubenswrapper[5002]: I0930 12:20:50.212624 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-additional-cni-plugins-2lqq2"] Sep 30 12:20:50 crc kubenswrapper[5002]: I0930 12:20:50.213595 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-ttfn8"] Sep 30 12:20:50 crc kubenswrapper[5002]: I0930 12:20:50.213757 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-2lqq2" Sep 30 12:20:50 crc kubenswrapper[5002]: I0930 12:20:50.213983 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-ttfn8" Sep 30 12:20:50 crc kubenswrapper[5002]: I0930 12:20:50.215742 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-daemon-ncbb5"] Sep 30 12:20:50 crc kubenswrapper[5002]: I0930 12:20:50.216420 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" Sep 30 12:20:50 crc kubenswrapper[5002]: I0930 12:20:50.219157 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config" Sep 30 12:20:50 crc kubenswrapper[5002]: I0930 12:20:50.219367 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6" Sep 30 12:20:50 crc kubenswrapper[5002]: I0930 12:20:50.219500 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz" Sep 30 12:20:50 crc kubenswrapper[5002]: I0930 12:20:50.219541 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt" Sep 30 12:20:50 crc kubenswrapper[5002]: I0930 12:20:50.219653 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy" Sep 30 12:20:50 crc kubenswrapper[5002]: I0930 12:20:50.219722 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources" Sep 30 12:20:50 crc kubenswrapper[5002]: I0930 12:20:50.219655 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq" Sep 30 12:20:50 crc kubenswrapper[5002]: I0930 12:20:50.220084 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist" Sep 30 12:20:50 crc kubenswrapper[5002]: I0930 12:20:50.220097 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Sep 30 12:20:50 crc kubenswrapper[5002]: I0930 12:20:50.220157 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt" Sep 30 12:20:50 crc kubenswrapper[5002]: I0930 12:20:50.220337 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt" Sep 30 12:20:50 crc kubenswrapper[5002]: I0930 12:20:50.220402 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls" Sep 30 12:20:50 crc kubenswrapper[5002]: I0930 12:20:50.232702 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/7b7df259-4156-484c-bedd-543ca42f2970-system-cni-dir\") pod \"multus-additional-cni-plugins-2lqq2\" (UID: \"7b7df259-4156-484c-bedd-543ca42f2970\") " pod="openshift-multus/multus-additional-cni-plugins-2lqq2" Sep 30 12:20:50 crc kubenswrapper[5002]: I0930 12:20:50.232739 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7b7df259-4156-484c-bedd-543ca42f2970-cni-binary-copy\") pod \"multus-additional-cni-plugins-2lqq2\" (UID: \"7b7df259-4156-484c-bedd-543ca42f2970\") " pod="openshift-multus/multus-additional-cni-plugins-2lqq2" Sep 30 12:20:50 crc kubenswrapper[5002]: I0930 12:20:50.232759 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/2bd9f18d-bfb3-4bd7-9a87-242029cd3200-multus-daemon-config\") pod \"multus-ttfn8\" (UID: \"2bd9f18d-bfb3-4bd7-9a87-242029cd3200\") " pod="openshift-multus/multus-ttfn8" Sep 
30 12:20:50 crc kubenswrapper[5002]: I0930 12:20:50.232784 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2bsjn\" (UniqueName: \"kubernetes.io/projected/7b7df259-4156-484c-bedd-543ca42f2970-kube-api-access-2bsjn\") pod \"multus-additional-cni-plugins-2lqq2\" (UID: \"7b7df259-4156-484c-bedd-543ca42f2970\") " pod="openshift-multus/multus-additional-cni-plugins-2lqq2" Sep 30 12:20:50 crc kubenswrapper[5002]: I0930 12:20:50.232803 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/2bd9f18d-bfb3-4bd7-9a87-242029cd3200-cnibin\") pod \"multus-ttfn8\" (UID: \"2bd9f18d-bfb3-4bd7-9a87-242029cd3200\") " pod="openshift-multus/multus-ttfn8" Sep 30 12:20:50 crc kubenswrapper[5002]: I0930 12:20:50.232887 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/2bd9f18d-bfb3-4bd7-9a87-242029cd3200-cni-binary-copy\") pod \"multus-ttfn8\" (UID: \"2bd9f18d-bfb3-4bd7-9a87-242029cd3200\") " pod="openshift-multus/multus-ttfn8" Sep 30 12:20:50 crc kubenswrapper[5002]: I0930 12:20:50.232948 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/341a55c6-78d3-4fa2-8f47-b56fd41fa1c1-mcd-auth-proxy-config\") pod \"machine-config-daemon-ncbb5\" (UID: \"341a55c6-78d3-4fa2-8f47-b56fd41fa1c1\") " pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" Sep 30 12:20:50 crc kubenswrapper[5002]: I0930 12:20:50.232969 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/2bd9f18d-bfb3-4bd7-9a87-242029cd3200-multus-socket-dir-parent\") pod \"multus-ttfn8\" (UID: \"2bd9f18d-bfb3-4bd7-9a87-242029cd3200\") " pod="openshift-multus/multus-ttfn8" Sep 30 12:20:50 crc kubenswrapper[5002]: I0930 12:20:50.232984 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/2bd9f18d-bfb3-4bd7-9a87-242029cd3200-host-var-lib-cni-multus\") pod \"multus-ttfn8\" (UID: \"2bd9f18d-bfb3-4bd7-9a87-242029cd3200\") " pod="openshift-multus/multus-ttfn8" Sep 30 12:20:50 crc kubenswrapper[5002]: I0930 12:20:50.233003 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/7b7df259-4156-484c-bedd-543ca42f2970-os-release\") pod \"multus-additional-cni-plugins-2lqq2\" (UID: \"7b7df259-4156-484c-bedd-543ca42f2970\") " pod="openshift-multus/multus-additional-cni-plugins-2lqq2" Sep 30 12:20:50 crc kubenswrapper[5002]: I0930 12:20:50.233019 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/2bd9f18d-bfb3-4bd7-9a87-242029cd3200-host-var-lib-kubelet\") pod \"multus-ttfn8\" (UID: \"2bd9f18d-bfb3-4bd7-9a87-242029cd3200\") " pod="openshift-multus/multus-ttfn8" Sep 30 12:20:50 crc kubenswrapper[5002]: I0930 12:20:50.233035 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/2bd9f18d-bfb3-4bd7-9a87-242029cd3200-os-release\") pod \"multus-ttfn8\" (UID: 
\"2bd9f18d-bfb3-4bd7-9a87-242029cd3200\") " pod="openshift-multus/multus-ttfn8" Sep 30 12:20:50 crc kubenswrapper[5002]: I0930 12:20:50.233050 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/2bd9f18d-bfb3-4bd7-9a87-242029cd3200-host-run-netns\") pod \"multus-ttfn8\" (UID: \"2bd9f18d-bfb3-4bd7-9a87-242029cd3200\") " pod="openshift-multus/multus-ttfn8" Sep 30 12:20:50 crc kubenswrapper[5002]: I0930 12:20:50.233147 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/2bd9f18d-bfb3-4bd7-9a87-242029cd3200-host-run-multus-certs\") pod \"multus-ttfn8\" (UID: \"2bd9f18d-bfb3-4bd7-9a87-242029cd3200\") " pod="openshift-multus/multus-ttfn8" Sep 30 12:20:50 crc kubenswrapper[5002]: I0930 12:20:50.233211 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/341a55c6-78d3-4fa2-8f47-b56fd41fa1c1-proxy-tls\") pod \"machine-config-daemon-ncbb5\" (UID: \"341a55c6-78d3-4fa2-8f47-b56fd41fa1c1\") " pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" Sep 30 12:20:50 crc kubenswrapper[5002]: I0930 12:20:50.233238 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/2bd9f18d-bfb3-4bd7-9a87-242029cd3200-host-var-lib-cni-bin\") pod \"multus-ttfn8\" (UID: \"2bd9f18d-bfb3-4bd7-9a87-242029cd3200\") " pod="openshift-multus/multus-ttfn8" Sep 30 12:20:50 crc kubenswrapper[5002]: I0930 12:20:50.233258 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/2bd9f18d-bfb3-4bd7-9a87-242029cd3200-hostroot\") pod \"multus-ttfn8\" (UID: \"2bd9f18d-bfb3-4bd7-9a87-242029cd3200\") " pod="openshift-multus/multus-ttfn8" Sep 30 12:20:50 crc kubenswrapper[5002]: I0930 12:20:50.233277 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/2bd9f18d-bfb3-4bd7-9a87-242029cd3200-etc-kubernetes\") pod \"multus-ttfn8\" (UID: \"2bd9f18d-bfb3-4bd7-9a87-242029cd3200\") " pod="openshift-multus/multus-ttfn8" Sep 30 12:20:50 crc kubenswrapper[5002]: I0930 12:20:50.233294 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/7b7df259-4156-484c-bedd-543ca42f2970-tuning-conf-dir\") pod \"multus-additional-cni-plugins-2lqq2\" (UID: \"7b7df259-4156-484c-bedd-543ca42f2970\") " pod="openshift-multus/multus-additional-cni-plugins-2lqq2" Sep 30 12:20:50 crc kubenswrapper[5002]: I0930 12:20:50.233311 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/2bd9f18d-bfb3-4bd7-9a87-242029cd3200-multus-conf-dir\") pod \"multus-ttfn8\" (UID: \"2bd9f18d-bfb3-4bd7-9a87-242029cd3200\") " pod="openshift-multus/multus-ttfn8" Sep 30 12:20:50 crc kubenswrapper[5002]: I0930 12:20:50.233348 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sbwzb\" (UniqueName: \"kubernetes.io/projected/341a55c6-78d3-4fa2-8f47-b56fd41fa1c1-kube-api-access-sbwzb\") pod 
\"machine-config-daemon-ncbb5\" (UID: \"341a55c6-78d3-4fa2-8f47-b56fd41fa1c1\") " pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" Sep 30 12:20:50 crc kubenswrapper[5002]: I0930 12:20:50.233376 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/341a55c6-78d3-4fa2-8f47-b56fd41fa1c1-rootfs\") pod \"machine-config-daemon-ncbb5\" (UID: \"341a55c6-78d3-4fa2-8f47-b56fd41fa1c1\") " pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" Sep 30 12:20:50 crc kubenswrapper[5002]: I0930 12:20:50.233400 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/7b7df259-4156-484c-bedd-543ca42f2970-cnibin\") pod \"multus-additional-cni-plugins-2lqq2\" (UID: \"7b7df259-4156-484c-bedd-543ca42f2970\") " pod="openshift-multus/multus-additional-cni-plugins-2lqq2" Sep 30 12:20:50 crc kubenswrapper[5002]: I0930 12:20:50.233416 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/2bd9f18d-bfb3-4bd7-9a87-242029cd3200-multus-cni-dir\") pod \"multus-ttfn8\" (UID: \"2bd9f18d-bfb3-4bd7-9a87-242029cd3200\") " pod="openshift-multus/multus-ttfn8" Sep 30 12:20:50 crc kubenswrapper[5002]: I0930 12:20:50.233432 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7b7df259-4156-484c-bedd-543ca42f2970-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-2lqq2\" (UID: \"7b7df259-4156-484c-bedd-543ca42f2970\") " pod="openshift-multus/multus-additional-cni-plugins-2lqq2" Sep 30 12:20:50 crc kubenswrapper[5002]: I0930 12:20:50.233540 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/2bd9f18d-bfb3-4bd7-9a87-242029cd3200-system-cni-dir\") pod \"multus-ttfn8\" (UID: \"2bd9f18d-bfb3-4bd7-9a87-242029cd3200\") " pod="openshift-multus/multus-ttfn8" Sep 30 12:20:50 crc kubenswrapper[5002]: I0930 12:20:50.233592 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/2bd9f18d-bfb3-4bd7-9a87-242029cd3200-host-run-k8s-cni-cncf-io\") pod \"multus-ttfn8\" (UID: \"2bd9f18d-bfb3-4bd7-9a87-242029cd3200\") " pod="openshift-multus/multus-ttfn8" Sep 30 12:20:50 crc kubenswrapper[5002]: I0930 12:20:50.233630 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lvf2n\" (UniqueName: \"kubernetes.io/projected/2bd9f18d-bfb3-4bd7-9a87-242029cd3200-kube-api-access-lvf2n\") pod \"multus-ttfn8\" (UID: \"2bd9f18d-bfb3-4bd7-9a87-242029cd3200\") " pod="openshift-multus/multus-ttfn8" Sep 30 12:20:50 crc kubenswrapper[5002]: I0930 12:20:50.234944 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:50Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:50 crc kubenswrapper[5002]: I0930 12:20:50.250512 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-p45pn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d0334cb8-cf38-4e50-80e8-2b81d8d46ae7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:49Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:49Z\\\",\\\"message\\\":\\\"containers with unready status: 
[dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2nvm4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:49Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-p45pn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:50Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:50 crc kubenswrapper[5002]: I0930 12:20:50.275955 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-2lqq2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7b7df259-4156-484c-bedd-543ca42f2970\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"na
me\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-2lqq2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:50Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:50 crc kubenswrapper[5002]: I0930 12:20:50.296403 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://28d9568103a5257388af7a54e676246ab6ee732abd4516bd06e2cd7471f0ec08\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:50Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:50 crc kubenswrapper[5002]: I0930 12:20:50.314576 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:50Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:50 crc kubenswrapper[5002]: I0930 12:20:50.327984 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://30f55f9a0bcab13420179c62f63cd5e785afe964ac86286450fcd91d7ca0562e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://306dd7840789c5fe2565d819a744a57e330cb1fcc1ab1ca4b3c5ebcfd0361d3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:47Z\\\"}},\\\"volumeMount
s\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:50Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:50 crc kubenswrapper[5002]: I0930 12:20:50.334273 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/341a55c6-78d3-4fa2-8f47-b56fd41fa1c1-proxy-tls\") pod \"machine-config-daemon-ncbb5\" (UID: \"341a55c6-78d3-4fa2-8f47-b56fd41fa1c1\") " pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" Sep 30 12:20:50 crc kubenswrapper[5002]: I0930 12:20:50.334315 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/2bd9f18d-bfb3-4bd7-9a87-242029cd3200-host-var-lib-cni-bin\") pod \"multus-ttfn8\" (UID: \"2bd9f18d-bfb3-4bd7-9a87-242029cd3200\") " pod="openshift-multus/multus-ttfn8" Sep 30 12:20:50 crc kubenswrapper[5002]: I0930 12:20:50.334340 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/2bd9f18d-bfb3-4bd7-9a87-242029cd3200-hostroot\") pod \"multus-ttfn8\" (UID: \"2bd9f18d-bfb3-4bd7-9a87-242029cd3200\") " pod="openshift-multus/multus-ttfn8" Sep 30 12:20:50 crc kubenswrapper[5002]: I0930 12:20:50.334364 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/2bd9f18d-bfb3-4bd7-9a87-242029cd3200-etc-kubernetes\") pod \"multus-ttfn8\" (UID: \"2bd9f18d-bfb3-4bd7-9a87-242029cd3200\") " pod="openshift-multus/multus-ttfn8" Sep 30 12:20:50 crc kubenswrapper[5002]: I0930 12:20:50.334386 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/7b7df259-4156-484c-bedd-543ca42f2970-tuning-conf-dir\") pod \"multus-additional-cni-plugins-2lqq2\" (UID: \"7b7df259-4156-484c-bedd-543ca42f2970\") " pod="openshift-multus/multus-additional-cni-plugins-2lqq2" Sep 30 12:20:50 crc kubenswrapper[5002]: I0930 12:20:50.334406 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/2bd9f18d-bfb3-4bd7-9a87-242029cd3200-multus-conf-dir\") pod \"multus-ttfn8\" (UID: \"2bd9f18d-bfb3-4bd7-9a87-242029cd3200\") " pod="openshift-multus/multus-ttfn8" Sep 30 12:20:50 crc kubenswrapper[5002]: I0930 12:20:50.334417 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/2bd9f18d-bfb3-4bd7-9a87-242029cd3200-host-var-lib-cni-bin\") pod \"multus-ttfn8\" (UID: \"2bd9f18d-bfb3-4bd7-9a87-242029cd3200\") " pod="openshift-multus/multus-ttfn8" Sep 30 12:20:50 crc kubenswrapper[5002]: I0930 12:20:50.334438 5002 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sbwzb\" (UniqueName: \"kubernetes.io/projected/341a55c6-78d3-4fa2-8f47-b56fd41fa1c1-kube-api-access-sbwzb\") pod \"machine-config-daemon-ncbb5\" (UID: \"341a55c6-78d3-4fa2-8f47-b56fd41fa1c1\") " pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" Sep 30 12:20:50 crc kubenswrapper[5002]: I0930 12:20:50.334462 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/341a55c6-78d3-4fa2-8f47-b56fd41fa1c1-rootfs\") pod \"machine-config-daemon-ncbb5\" (UID: \"341a55c6-78d3-4fa2-8f47-b56fd41fa1c1\") " pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" Sep 30 12:20:50 crc kubenswrapper[5002]: I0930 12:20:50.334499 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/7b7df259-4156-484c-bedd-543ca42f2970-cnibin\") pod \"multus-additional-cni-plugins-2lqq2\" (UID: \"7b7df259-4156-484c-bedd-543ca42f2970\") " pod="openshift-multus/multus-additional-cni-plugins-2lqq2" Sep 30 12:20:50 crc kubenswrapper[5002]: I0930 12:20:50.334502 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/2bd9f18d-bfb3-4bd7-9a87-242029cd3200-hostroot\") pod \"multus-ttfn8\" (UID: \"2bd9f18d-bfb3-4bd7-9a87-242029cd3200\") " pod="openshift-multus/multus-ttfn8" Sep 30 12:20:50 crc kubenswrapper[5002]: I0930 12:20:50.334532 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/2bd9f18d-bfb3-4bd7-9a87-242029cd3200-multus-cni-dir\") pod \"multus-ttfn8\" (UID: \"2bd9f18d-bfb3-4bd7-9a87-242029cd3200\") " pod="openshift-multus/multus-ttfn8" Sep 30 12:20:50 crc kubenswrapper[5002]: I0930 12:20:50.334556 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7b7df259-4156-484c-bedd-543ca42f2970-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-2lqq2\" (UID: \"7b7df259-4156-484c-bedd-543ca42f2970\") " pod="openshift-multus/multus-additional-cni-plugins-2lqq2" Sep 30 12:20:50 crc kubenswrapper[5002]: I0930 12:20:50.334580 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/2bd9f18d-bfb3-4bd7-9a87-242029cd3200-system-cni-dir\") pod \"multus-ttfn8\" (UID: \"2bd9f18d-bfb3-4bd7-9a87-242029cd3200\") " pod="openshift-multus/multus-ttfn8" Sep 30 12:20:50 crc kubenswrapper[5002]: I0930 12:20:50.334577 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/7b7df259-4156-484c-bedd-543ca42f2970-cnibin\") pod \"multus-additional-cni-plugins-2lqq2\" (UID: \"7b7df259-4156-484c-bedd-543ca42f2970\") " pod="openshift-multus/multus-additional-cni-plugins-2lqq2" Sep 30 12:20:50 crc kubenswrapper[5002]: I0930 12:20:50.334602 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/2bd9f18d-bfb3-4bd7-9a87-242029cd3200-host-run-k8s-cni-cncf-io\") pod \"multus-ttfn8\" (UID: \"2bd9f18d-bfb3-4bd7-9a87-242029cd3200\") " pod="openshift-multus/multus-ttfn8" Sep 30 12:20:50 crc kubenswrapper[5002]: I0930 12:20:50.334612 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"rootfs\" (UniqueName: \"kubernetes.io/host-path/341a55c6-78d3-4fa2-8f47-b56fd41fa1c1-rootfs\") pod \"machine-config-daemon-ncbb5\" (UID: \"341a55c6-78d3-4fa2-8f47-b56fd41fa1c1\") " pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" Sep 30 12:20:50 crc kubenswrapper[5002]: I0930 12:20:50.334623 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lvf2n\" (UniqueName: \"kubernetes.io/projected/2bd9f18d-bfb3-4bd7-9a87-242029cd3200-kube-api-access-lvf2n\") pod \"multus-ttfn8\" (UID: \"2bd9f18d-bfb3-4bd7-9a87-242029cd3200\") " pod="openshift-multus/multus-ttfn8" Sep 30 12:20:50 crc kubenswrapper[5002]: I0930 12:20:50.334641 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/7b7df259-4156-484c-bedd-543ca42f2970-system-cni-dir\") pod \"multus-additional-cni-plugins-2lqq2\" (UID: \"7b7df259-4156-484c-bedd-543ca42f2970\") " pod="openshift-multus/multus-additional-cni-plugins-2lqq2" Sep 30 12:20:50 crc kubenswrapper[5002]: I0930 12:20:50.334660 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7b7df259-4156-484c-bedd-543ca42f2970-cni-binary-copy\") pod \"multus-additional-cni-plugins-2lqq2\" (UID: \"7b7df259-4156-484c-bedd-543ca42f2970\") " pod="openshift-multus/multus-additional-cni-plugins-2lqq2" Sep 30 12:20:50 crc kubenswrapper[5002]: I0930 12:20:50.334674 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/2bd9f18d-bfb3-4bd7-9a87-242029cd3200-multus-daemon-config\") pod \"multus-ttfn8\" (UID: \"2bd9f18d-bfb3-4bd7-9a87-242029cd3200\") " pod="openshift-multus/multus-ttfn8" Sep 30 12:20:50 crc kubenswrapper[5002]: I0930 12:20:50.334697 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2bsjn\" (UniqueName: \"kubernetes.io/projected/7b7df259-4156-484c-bedd-543ca42f2970-kube-api-access-2bsjn\") pod \"multus-additional-cni-plugins-2lqq2\" (UID: \"7b7df259-4156-484c-bedd-543ca42f2970\") " pod="openshift-multus/multus-additional-cni-plugins-2lqq2" Sep 30 12:20:50 crc kubenswrapper[5002]: I0930 12:20:50.334712 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/2bd9f18d-bfb3-4bd7-9a87-242029cd3200-cnibin\") pod \"multus-ttfn8\" (UID: \"2bd9f18d-bfb3-4bd7-9a87-242029cd3200\") " pod="openshift-multus/multus-ttfn8" Sep 30 12:20:50 crc kubenswrapper[5002]: I0930 12:20:50.334729 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/2bd9f18d-bfb3-4bd7-9a87-242029cd3200-cni-binary-copy\") pod \"multus-ttfn8\" (UID: \"2bd9f18d-bfb3-4bd7-9a87-242029cd3200\") " pod="openshift-multus/multus-ttfn8" Sep 30 12:20:50 crc kubenswrapper[5002]: I0930 12:20:50.334754 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/341a55c6-78d3-4fa2-8f47-b56fd41fa1c1-mcd-auth-proxy-config\") pod \"machine-config-daemon-ncbb5\" (UID: \"341a55c6-78d3-4fa2-8f47-b56fd41fa1c1\") " pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" Sep 30 12:20:50 crc kubenswrapper[5002]: I0930 12:20:50.334771 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/2bd9f18d-bfb3-4bd7-9a87-242029cd3200-multus-socket-dir-parent\") pod \"multus-ttfn8\" (UID: \"2bd9f18d-bfb3-4bd7-9a87-242029cd3200\") " pod="openshift-multus/multus-ttfn8" Sep 30 12:20:50 crc kubenswrapper[5002]: I0930 12:20:50.334795 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/2bd9f18d-bfb3-4bd7-9a87-242029cd3200-host-var-lib-cni-multus\") pod \"multus-ttfn8\" (UID: \"2bd9f18d-bfb3-4bd7-9a87-242029cd3200\") " pod="openshift-multus/multus-ttfn8" Sep 30 12:20:50 crc kubenswrapper[5002]: I0930 12:20:50.334823 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/7b7df259-4156-484c-bedd-543ca42f2970-os-release\") pod \"multus-additional-cni-plugins-2lqq2\" (UID: \"7b7df259-4156-484c-bedd-543ca42f2970\") " pod="openshift-multus/multus-additional-cni-plugins-2lqq2" Sep 30 12:20:50 crc kubenswrapper[5002]: I0930 12:20:50.334848 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/2bd9f18d-bfb3-4bd7-9a87-242029cd3200-host-var-lib-kubelet\") pod \"multus-ttfn8\" (UID: \"2bd9f18d-bfb3-4bd7-9a87-242029cd3200\") " pod="openshift-multus/multus-ttfn8" Sep 30 12:20:50 crc kubenswrapper[5002]: I0930 12:20:50.334870 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/2bd9f18d-bfb3-4bd7-9a87-242029cd3200-os-release\") pod \"multus-ttfn8\" (UID: \"2bd9f18d-bfb3-4bd7-9a87-242029cd3200\") " pod="openshift-multus/multus-ttfn8" Sep 30 12:20:50 crc kubenswrapper[5002]: I0930 12:20:50.334890 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/2bd9f18d-bfb3-4bd7-9a87-242029cd3200-host-run-netns\") pod \"multus-ttfn8\" (UID: \"2bd9f18d-bfb3-4bd7-9a87-242029cd3200\") " pod="openshift-multus/multus-ttfn8" Sep 30 12:20:50 crc kubenswrapper[5002]: I0930 12:20:50.334919 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/2bd9f18d-bfb3-4bd7-9a87-242029cd3200-host-run-multus-certs\") pod \"multus-ttfn8\" (UID: \"2bd9f18d-bfb3-4bd7-9a87-242029cd3200\") " pod="openshift-multus/multus-ttfn8" Sep 30 12:20:50 crc kubenswrapper[5002]: I0930 12:20:50.334952 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/2bd9f18d-bfb3-4bd7-9a87-242029cd3200-multus-cni-dir\") pod \"multus-ttfn8\" (UID: \"2bd9f18d-bfb3-4bd7-9a87-242029cd3200\") " pod="openshift-multus/multus-ttfn8" Sep 30 12:20:50 crc kubenswrapper[5002]: I0930 12:20:50.334978 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/2bd9f18d-bfb3-4bd7-9a87-242029cd3200-host-run-multus-certs\") pod \"multus-ttfn8\" (UID: \"2bd9f18d-bfb3-4bd7-9a87-242029cd3200\") " pod="openshift-multus/multus-ttfn8" Sep 30 12:20:50 crc kubenswrapper[5002]: I0930 12:20:50.335019 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/2bd9f18d-bfb3-4bd7-9a87-242029cd3200-host-var-lib-cni-multus\") pod \"multus-ttfn8\" (UID: 
\"2bd9f18d-bfb3-4bd7-9a87-242029cd3200\") " pod="openshift-multus/multus-ttfn8" Sep 30 12:20:50 crc kubenswrapper[5002]: I0930 12:20:50.335070 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/7b7df259-4156-484c-bedd-543ca42f2970-tuning-conf-dir\") pod \"multus-additional-cni-plugins-2lqq2\" (UID: \"7b7df259-4156-484c-bedd-543ca42f2970\") " pod="openshift-multus/multus-additional-cni-plugins-2lqq2" Sep 30 12:20:50 crc kubenswrapper[5002]: I0930 12:20:50.335240 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/7b7df259-4156-484c-bedd-543ca42f2970-os-release\") pod \"multus-additional-cni-plugins-2lqq2\" (UID: \"7b7df259-4156-484c-bedd-543ca42f2970\") " pod="openshift-multus/multus-additional-cni-plugins-2lqq2" Sep 30 12:20:50 crc kubenswrapper[5002]: I0930 12:20:50.335246 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7b7df259-4156-484c-bedd-543ca42f2970-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-2lqq2\" (UID: \"7b7df259-4156-484c-bedd-543ca42f2970\") " pod="openshift-multus/multus-additional-cni-plugins-2lqq2" Sep 30 12:20:50 crc kubenswrapper[5002]: I0930 12:20:50.334821 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/2bd9f18d-bfb3-4bd7-9a87-242029cd3200-multus-conf-dir\") pod \"multus-ttfn8\" (UID: \"2bd9f18d-bfb3-4bd7-9a87-242029cd3200\") " pod="openshift-multus/multus-ttfn8" Sep 30 12:20:50 crc kubenswrapper[5002]: I0930 12:20:50.335296 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/2bd9f18d-bfb3-4bd7-9a87-242029cd3200-host-var-lib-kubelet\") pod \"multus-ttfn8\" (UID: \"2bd9f18d-bfb3-4bd7-9a87-242029cd3200\") " pod="openshift-multus/multus-ttfn8" Sep 30 12:20:50 crc kubenswrapper[5002]: I0930 12:20:50.334525 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/2bd9f18d-bfb3-4bd7-9a87-242029cd3200-etc-kubernetes\") pod \"multus-ttfn8\" (UID: \"2bd9f18d-bfb3-4bd7-9a87-242029cd3200\") " pod="openshift-multus/multus-ttfn8" Sep 30 12:20:50 crc kubenswrapper[5002]: I0930 12:20:50.335350 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/2bd9f18d-bfb3-4bd7-9a87-242029cd3200-os-release\") pod \"multus-ttfn8\" (UID: \"2bd9f18d-bfb3-4bd7-9a87-242029cd3200\") " pod="openshift-multus/multus-ttfn8" Sep 30 12:20:50 crc kubenswrapper[5002]: I0930 12:20:50.335378 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/2bd9f18d-bfb3-4bd7-9a87-242029cd3200-host-run-netns\") pod \"multus-ttfn8\" (UID: \"2bd9f18d-bfb3-4bd7-9a87-242029cd3200\") " pod="openshift-multus/multus-ttfn8" Sep 30 12:20:50 crc kubenswrapper[5002]: I0930 12:20:50.335500 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/2bd9f18d-bfb3-4bd7-9a87-242029cd3200-system-cni-dir\") pod \"multus-ttfn8\" (UID: \"2bd9f18d-bfb3-4bd7-9a87-242029cd3200\") " pod="openshift-multus/multus-ttfn8" Sep 30 12:20:50 crc kubenswrapper[5002]: I0930 12:20:50.335532 5002 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/2bd9f18d-bfb3-4bd7-9a87-242029cd3200-host-run-k8s-cni-cncf-io\") pod \"multus-ttfn8\" (UID: \"2bd9f18d-bfb3-4bd7-9a87-242029cd3200\") " pod="openshift-multus/multus-ttfn8" Sep 30 12:20:50 crc kubenswrapper[5002]: I0930 12:20:50.335551 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/2bd9f18d-bfb3-4bd7-9a87-242029cd3200-multus-socket-dir-parent\") pod \"multus-ttfn8\" (UID: \"2bd9f18d-bfb3-4bd7-9a87-242029cd3200\") " pod="openshift-multus/multus-ttfn8" Sep 30 12:20:50 crc kubenswrapper[5002]: I0930 12:20:50.335565 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/7b7df259-4156-484c-bedd-543ca42f2970-system-cni-dir\") pod \"multus-additional-cni-plugins-2lqq2\" (UID: \"7b7df259-4156-484c-bedd-543ca42f2970\") " pod="openshift-multus/multus-additional-cni-plugins-2lqq2" Sep 30 12:20:50 crc kubenswrapper[5002]: I0930 12:20:50.335551 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/2bd9f18d-bfb3-4bd7-9a87-242029cd3200-multus-daemon-config\") pod \"multus-ttfn8\" (UID: \"2bd9f18d-bfb3-4bd7-9a87-242029cd3200\") " pod="openshift-multus/multus-ttfn8" Sep 30 12:20:50 crc kubenswrapper[5002]: I0930 12:20:50.335599 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/2bd9f18d-bfb3-4bd7-9a87-242029cd3200-cnibin\") pod \"multus-ttfn8\" (UID: \"2bd9f18d-bfb3-4bd7-9a87-242029cd3200\") " pod="openshift-multus/multus-ttfn8" Sep 30 12:20:50 crc kubenswrapper[5002]: I0930 12:20:50.335808 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/2bd9f18d-bfb3-4bd7-9a87-242029cd3200-cni-binary-copy\") pod \"multus-ttfn8\" (UID: \"2bd9f18d-bfb3-4bd7-9a87-242029cd3200\") " pod="openshift-multus/multus-ttfn8" Sep 30 12:20:50 crc kubenswrapper[5002]: I0930 12:20:50.335831 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/341a55c6-78d3-4fa2-8f47-b56fd41fa1c1-mcd-auth-proxy-config\") pod \"machine-config-daemon-ncbb5\" (UID: \"341a55c6-78d3-4fa2-8f47-b56fd41fa1c1\") " pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" Sep 30 12:20:50 crc kubenswrapper[5002]: I0930 12:20:50.336072 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7b7df259-4156-484c-bedd-543ca42f2970-cni-binary-copy\") pod \"multus-additional-cni-plugins-2lqq2\" (UID: \"7b7df259-4156-484c-bedd-543ca42f2970\") " pod="openshift-multus/multus-additional-cni-plugins-2lqq2" Sep 30 12:20:50 crc kubenswrapper[5002]: I0930 12:20:50.337128 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/341a55c6-78d3-4fa2-8f47-b56fd41fa1c1-proxy-tls\") pod \"machine-config-daemon-ncbb5\" (UID: \"341a55c6-78d3-4fa2-8f47-b56fd41fa1c1\") " pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" Sep 30 12:20:50 crc kubenswrapper[5002]: I0930 12:20:50.342027 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:50Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:50 crc kubenswrapper[5002]: I0930 12:20:50.355671 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2bsjn\" (UniqueName: \"kubernetes.io/projected/7b7df259-4156-484c-bedd-543ca42f2970-kube-api-access-2bsjn\") pod \"multus-additional-cni-plugins-2lqq2\" (UID: \"7b7df259-4156-484c-bedd-543ca42f2970\") " pod="openshift-multus/multus-additional-cni-plugins-2lqq2" Sep 30 12:20:50 crc kubenswrapper[5002]: I0930 12:20:50.356048 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lvf2n\" (UniqueName: \"kubernetes.io/projected/2bd9f18d-bfb3-4bd7-9a87-242029cd3200-kube-api-access-lvf2n\") pod \"multus-ttfn8\" (UID: \"2bd9f18d-bfb3-4bd7-9a87-242029cd3200\") " pod="openshift-multus/multus-ttfn8" Sep 30 12:20:50 crc kubenswrapper[5002]: I0930 12:20:50.359195 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:50Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:50 crc kubenswrapper[5002]: I0930 12:20:50.363923 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sbwzb\" (UniqueName: \"kubernetes.io/projected/341a55c6-78d3-4fa2-8f47-b56fd41fa1c1-kube-api-access-sbwzb\") pod \"machine-config-daemon-ncbb5\" (UID: \"341a55c6-78d3-4fa2-8f47-b56fd41fa1c1\") " pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" Sep 30 12:20:50 crc kubenswrapper[5002]: I0930 12:20:50.421377 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1d4eefd-8292-45a1-af4b-4a4906cccd2f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21a70de7ac636795f52b3bdd2a2b938494cabcf80e1fb9b321a309f4719edf4e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://240fb1d0e11ae7b3459121ba32094ddfa79dc71a1180befebcaf594859a63d86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://91de7b5c3b52d03b05c151b66857b5af5f3b9d228f3f1af32a7f504225e1e739\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4cb1aca4e313653c2d83c0d4fb8dfab1e6f8c82
20c806cbcdcf154f2d9ab6e29\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ef59c1ec617a61effd91da12779f13866a50fe611d078330f0275e8d6ef1c27e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3a814062d9554255b768b26ad452946c64836707ac0e68c9e3f107dc587c0ccb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3a814062d9554255b768b26ad452946c64836707ac0e68c9e3f107dc587c0ccb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://74db2c969d2f9049fd8e8130a9fd50b312f3fce049ab0e814390dbff3b0cb596\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://74db2c969d2f9049fd8e8130a9fd50b312f3fce049ab0e814390dbff3b0cb596\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://a367821c9b4b8799fb789ca39d487c519925da11ebb80e94a0123e02cf78964e\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a367821c9b4b8799fb789ca39d487c519925da11ebb80e94a0123e02cf78964e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:26Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:50Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:50 crc kubenswrapper[5002]: I0930 12:20:50.447226 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1fe2fccc-790d-41b7-a322-0f99dbd9b3e2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a46e01897629261ad42fc3fa2cc7fdf56a2a020864a1088d9e58edf61aac296e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://017ebbf00a59d33f36e3515dd280bc0c4363da8b3e621339185ebdb3c8728148\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ee0dc371a43f3780e1178e29ac491c7a61786fccb37527dd2f21d800adbbdae\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://49c139b2c0ae558d108d47a8452c2a8348fc164761d468e1b9b98ff2c7407f20\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://49c139b2c0ae558d108d47a8452c2a8348fc164761d468e1b9b98ff2c7407f20\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"message\\\":\\\"le observer\\\\nW0930 12:20:46.369838 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0930 12:20:46.370037 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 12:20:46.371113 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1995022772/tls.crt::/tmp/serving-cert-1995022772/tls.key\\\\\\\"\\\\nI0930 12:20:46.549991 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0930 12:20:46.558658 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0930 12:20:46.558692 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0930 12:20:46.558723 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0930 12:20:46.558735 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0930 12:20:46.572632 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0930 12:20:46.572689 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 12:20:46.572707 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 12:20:46.572718 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0930 12:20:46.572725 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0930 12:20:46.572737 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0930 12:20:46.572744 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0930 12:20:46.573045 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0930 12:20:46.574462 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:40Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b99e73bb34b117cdb12e2475fc8e7d0ffdcb7fc44a000282e7776473eca209ea\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://24b1bc36a80f3925846e76f49c3347f1e2f14555096b3a514b06c6eb2e8fe02c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://24b1bc36a80f3925846e76f49c3347f1e2f14555096b3a514b06c6eb2e8fe02c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:50Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:50 crc kubenswrapper[5002]: I0930 12:20:50.476222 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://30f55f9a0bcab13420179c62f63cd5e785afe964ac86286450fcd91d7ca0562e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://306dd7840789c5fe2565d819a744a57e330cb1fcc1ab1ca4b3c5ebcfd0361d3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:50Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:50 crc kubenswrapper[5002]: I0930 12:20:50.507499 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:50Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:50 crc kubenswrapper[5002]: I0930 12:20:50.530283 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-ttfn8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2bd9f18d-bfb3-4bd7-9a87-242029cd3200\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lvf2n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-ttfn8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:50Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:50 crc kubenswrapper[5002]: I0930 12:20:50.536557 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 12:20:50 crc kubenswrapper[5002]: I0930 12:20:50.536652 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 12:20:50 crc kubenswrapper[5002]: I0930 12:20:50.536680 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 12:20:50 crc kubenswrapper[5002]: I0930 12:20:50.536718 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 12:20:50 crc kubenswrapper[5002]: I0930 12:20:50.536735 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 12:20:50 crc kubenswrapper[5002]: E0930 12:20:50.536762 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 12:20:54.536735615 +0000 UTC m=+28.786417771 (durationBeforeRetry 4s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 12:20:50 crc kubenswrapper[5002]: E0930 12:20:50.536824 5002 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Sep 30 12:20:50 crc kubenswrapper[5002]: E0930 12:20:50.536866 5002 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Sep 30 12:20:50 crc kubenswrapper[5002]: E0930 12:20:50.536888 5002 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Sep 30 12:20:50 crc kubenswrapper[5002]: E0930 12:20:50.536920 5002 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Sep 30 12:20:50 crc kubenswrapper[5002]: E0930 12:20:50.536933 5002 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 30 
12:20:50 crc kubenswrapper[5002]: E0930 12:20:50.536893 5002 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Sep 30 12:20:50 crc kubenswrapper[5002]: E0930 12:20:50.536983 5002 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 30 12:20:50 crc kubenswrapper[5002]: E0930 12:20:50.536874 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-30 12:20:54.536860709 +0000 UTC m=+28.786542855 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Sep 30 12:20:50 crc kubenswrapper[5002]: E0930 12:20:50.537027 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-09-30 12:20:54.537006863 +0000 UTC m=+28.786689009 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 30 12:20:50 crc kubenswrapper[5002]: E0930 12:20:50.537038 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-09-30 12:20:54.537033693 +0000 UTC m=+28.786715839 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 30 12:20:50 crc kubenswrapper[5002]: E0930 12:20:50.536892 5002 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Sep 30 12:20:50 crc kubenswrapper[5002]: E0930 12:20:50.537073 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. 
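The nestedpendingoperations records interleaved here show kubelet's retry policy for failed volume operations: a failed MountVolume.SetUp or UnmountVolume.TearDown is not retried in a tight loop but parked until a deadline ("No retries permitted until ...", durationBeforeRetry 4s at this point), with the delay growing under exponential backoff on repeated failures. A rough, self-contained sketch of that pattern, assuming a simple doubling schedule; this is an illustration of the mechanism, not kubelet's actual volumemanager code:

package main

import (
	"errors"
	"fmt"
	"time"
)

// retryWithBackoff runs op, doubling the wait after each failure,
// and gives up after maxAttempts.
func retryWithBackoff(op func() error, base time.Duration, maxAttempts int) error {
	delay := base
	for attempt := 1; ; attempt++ {
		err := op()
		if err == nil {
			return nil
		}
		if attempt == maxAttempts {
			return fmt.Errorf("giving up after %d attempts: %w", attempt, err)
		}
		fmt.Printf("attempt %d failed; no retries permitted until %s (durationBeforeRetry %s)\n",
			attempt, time.Now().Add(delay).Format(time.RFC3339), delay)
		time.Sleep(delay)
		delay *= 2
	}
}

func main() {
	// Stand-in failure modeled on the volume errors in this log.
	mountErr := errors.New(`object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered`)
	_ = retryWithBackoff(func() error { return mountErr }, 2*time.Second, 3)
}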
No retries permitted until 2025-09-30 12:20:54.537067954 +0000 UTC m=+28.786750100 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Sep 30 12:20:50 crc kubenswrapper[5002]: I0930 12:20:50.542315 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"341a55c6-78d3-4fa2-8f47-b56fd41fa1c1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbwzb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbwzb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:50Z\\\"}}\" for pod 
\"openshift-machine-config-operator\"/\"machine-config-daemon-ncbb5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:50Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:50 crc kubenswrapper[5002]: I0930 12:20:50.576277 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:50Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:50 crc kubenswrapper[5002]: I0930 12:20:50.588019 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-p45pn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d0334cb8-cf38-4e50-80e8-2b81d8d46ae7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:49Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:49Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2nvm4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:49Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-p45pn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:50Z is 
after 2025-08-24T17:21:41Z" Sep 30 12:20:50 crc kubenswrapper[5002]: I0930 12:20:50.598372 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-2lqq2" Sep 30 12:20:50 crc kubenswrapper[5002]: I0930 12:20:50.603296 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-2lqq2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7b7df259-4156-484c-bedd-543ca42f2970\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha
256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disa
bled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-2lqq2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:50Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:50 crc kubenswrapper[5002]: I0930 12:20:50.608798 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-ttfn8" Sep 30 12:20:50 crc kubenswrapper[5002]: W0930 12:20:50.621279 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2bd9f18d_bfb3_4bd7_9a87_242029cd3200.slice/crio-3bce1376962c9aaceac355c8e943559f467fce40edcaf8b9c9294a715b1abde1 WatchSource:0}: Error finding container 3bce1376962c9aaceac355c8e943559f467fce40edcaf8b9c9294a715b1abde1: Status 404 returned error can't find the container with id 3bce1376962c9aaceac355c8e943559f467fce40edcaf8b9c9294a715b1abde1 Sep 30 12:20:50 crc kubenswrapper[5002]: I0930 12:20:50.626078 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" Sep 30 12:20:50 crc kubenswrapper[5002]: I0930 12:20:50.630614 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1d4eefd-8292-45a1-af4b-4a4906cccd2f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21a70de7ac636795f52b3bdd2a2b938494cabcf80e1fb9b321a309f4719edf4e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://240fb1d0e11ae7b3459121ba32094ddfa79dc71a1180befebcaf594859a63d86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://91de7b5c3b52d03b05c151b66857b5af5f3b9d228f3f1af32a7f504225e1e739\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30
T12:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4cb1aca4e313653c2d83c0d4fb8dfab1e6f8c8220c806cbcdcf154f2d9ab6e29\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ef59c1ec617a61effd91da12779f13866a50fe611d078330f0275e8d6ef1c27e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3a814062d9554255b768b26ad452946c64836707ac0e68c9e3f107dc587c0ccb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3a814062d9554255b768b26ad452946c64836707ac0e68c9e3f107dc587c0ccb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://74db2c969d2f9049fd8e8130a9fd50b312f3fce049ab0e814390dbff3b0cb596\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://74db2c969d2f9049fd8e8130a9fd50b312f3fce049ab
0e814390dbff3b0cb596\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://a367821c9b4b8799fb789ca39d487c519925da11ebb80e94a0123e02cf78964e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a367821c9b4b8799fb789ca39d487c519925da11ebb80e94a0123e02cf78964e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:26Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:50Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:50 crc kubenswrapper[5002]: W0930 12:20:50.641919 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod341a55c6_78d3_4fa2_8f47_b56fd41fa1c1.slice/crio-65696b7fd3b7b806a6c7dc1ab8e8a842bf6479124d999957da26f8dc3b4f1d3f WatchSource:0}: Error finding container 65696b7fd3b7b806a6c7dc1ab8e8a842bf6479124d999957da26f8dc3b4f1d3f: Status 404 returned error can't find the container with id 65696b7fd3b7b806a6c7dc1ab8e8a842bf6479124d999957da26f8dc3b4f1d3f Sep 30 12:20:50 crc kubenswrapper[5002]: I0930 12:20:50.650852 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1fe2fccc-790d-41b7-a322-0f99dbd9b3e2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a46e01897629261ad42fc3fa2cc7fdf56a2a020864a1088d9e58edf61aac296e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://017ebbf00a59d33f36e3515dd280bc0c4363da8b3e621339185ebdb3c8728148\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ee0dc371a43f3780e1178e29ac491c7a61786fccb37527dd2f21d800adbbdae\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://49c139b2c0ae558d108d47a8452c2a8348fc164761d468e1b9b98ff2c7407f20\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://49c139b2c0ae558d108d47a8452c2a8348fc164761d468e1b9b98ff2c7407f20\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"message\\\":\\\"le observer\\\\nW0930 12:20:46.369838 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0930 12:20:46.370037 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 12:20:46.371113 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1995022772/tls.crt::/tmp/serving-cert-1995022772/tls.key\\\\\\\"\\\\nI0930 12:20:46.549991 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0930 12:20:46.558658 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0930 12:20:46.558692 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0930 12:20:46.558723 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0930 12:20:46.558735 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0930 12:20:46.572632 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0930 12:20:46.572689 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 12:20:46.572707 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 12:20:46.572718 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0930 12:20:46.572725 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0930 12:20:46.572737 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0930 12:20:46.572744 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0930 12:20:46.573045 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0930 12:20:46.574462 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:40Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b99e73bb34b117cdb12e2475fc8e7d0ffdcb7fc44a000282e7776473eca209ea\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://24b1bc36a80f3925846e76f49c3347f1e2f14555096b3a514b06c6eb2e8fe02c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://24b1bc36a80f3925846e76f49c3347f1e2f14555096b3a514b06c6eb2e8fe02c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:50Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:50 crc kubenswrapper[5002]: I0930 12:20:50.668903 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://28d9568103a5257388af7a54e676246ab6ee732abd4516bd06e2cd7471f0ec08\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:50Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:50 crc kubenswrapper[5002]: I0930 12:20:50.675991 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 12:20:50 crc kubenswrapper[5002]: E0930 12:20:50.676154 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 12:20:50 crc kubenswrapper[5002]: I0930 12:20:50.676687 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 12:20:50 crc kubenswrapper[5002]: E0930 12:20:50.676747 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 12:20:50 crc kubenswrapper[5002]: I0930 12:20:50.676942 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 12:20:50 crc kubenswrapper[5002]: E0930 12:20:50.677111 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 12:20:50 crc kubenswrapper[5002]: I0930 12:20:50.686087 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:50Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:50 crc kubenswrapper[5002]: I0930 12:20:50.705263 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:50Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:50 crc kubenswrapper[5002]: I0930 12:20:50.959797 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"97f2c4d225c4e4f2f04337915618c52ced7280fe45dc735fd9b1aca03f95f7f4"} Sep 30 12:20:50 crc kubenswrapper[5002]: I0930 12:20:50.960879 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-4pvsr"] Sep 30 12:20:50 crc kubenswrapper[5002]: I0930 12:20:50.961866 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" event={"ID":"341a55c6-78d3-4fa2-8f47-b56fd41fa1c1","Type":"ContainerStarted","Data":"7bc40360d8d17f580c7ded1e33762aa443998e4202892e2f2effc08b6659a4a4"} Sep 30 12:20:50 crc kubenswrapper[5002]: I0930 12:20:50.961899 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" event={"ID":"341a55c6-78d3-4fa2-8f47-b56fd41fa1c1","Type":"ContainerStarted","Data":"6868404a7219f8dbbc4e7541e5f80402c169808daea81b5f94546472cb8e70a6"} Sep 30 12:20:50 crc kubenswrapper[5002]: I0930 12:20:50.961914 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" event={"ID":"341a55c6-78d3-4fa2-8f47-b56fd41fa1c1","Type":"ContainerStarted","Data":"65696b7fd3b7b806a6c7dc1ab8e8a842bf6479124d999957da26f8dc3b4f1d3f"} Sep 30 12:20:50 crc kubenswrapper[5002]: I0930 12:20:50.962063 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-4pvsr" Sep 30 12:20:50 crc kubenswrapper[5002]: I0930 12:20:50.962575 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-ttfn8" event={"ID":"2bd9f18d-bfb3-4bd7-9a87-242029cd3200","Type":"ContainerStarted","Data":"0809a587f4b73c4a5e632e8db2cc0d4a957eded6da9a771cf0b53e4862de54e5"} Sep 30 12:20:50 crc kubenswrapper[5002]: I0930 12:20:50.962606 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-ttfn8" event={"ID":"2bd9f18d-bfb3-4bd7-9a87-242029cd3200","Type":"ContainerStarted","Data":"3bce1376962c9aaceac355c8e943559f467fce40edcaf8b9c9294a715b1abde1"} Sep 30 12:20:50 crc kubenswrapper[5002]: I0930 12:20:50.963879 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-2lqq2" event={"ID":"7b7df259-4156-484c-bedd-543ca42f2970","Type":"ContainerStarted","Data":"6865c45580d6dc6acaf6dede9483213728b45f602a6df3a6d9264bbec520db66"} Sep 30 12:20:50 crc kubenswrapper[5002]: I0930 12:20:50.963934 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-2lqq2" event={"ID":"7b7df259-4156-484c-bedd-543ca42f2970","Type":"ContainerStarted","Data":"020f654b9f639f514944e0778e8a0bbe343506e82138729685a5659e7273b0cd"} Sep 30 12:20:50 crc kubenswrapper[5002]: I0930 12:20:50.964936 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-p45pn" event={"ID":"d0334cb8-cf38-4e50-80e8-2b81d8d46ae7","Type":"ContainerStarted","Data":"ca035599120eed38b0abc1893076e4d1ecab6b1ce53605f53ff30fa2782b63de"} Sep 30 12:20:50 crc kubenswrapper[5002]: I0930 12:20:50.964984 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-p45pn" event={"ID":"d0334cb8-cf38-4e50-80e8-2b81d8d46ae7","Type":"ContainerStarted","Data":"e2dd96eeffbcd58f91fc9b0afbc6ba1981379257500be971a44f88946e7f0e93"} Sep 30 12:20:50 crc kubenswrapper[5002]: I0930 12:20:50.969518 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert" Sep 30 12:20:50 crc kubenswrapper[5002]: I0930 12:20:50.970274 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl" Sep 30 12:20:50 crc kubenswrapper[5002]: I0930 12:20:50.971000 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib" Sep 30 12:20:50 crc kubenswrapper[5002]: I0930 12:20:50.972130 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides" Sep 30 12:20:50 crc kubenswrapper[5002]: I0930 12:20:50.972227 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt" Sep 30 12:20:50 crc kubenswrapper[5002]: I0930 12:20:50.972140 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Sep 30 12:20:50 crc kubenswrapper[5002]: I0930 12:20:50.972811 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config" Sep 30 12:20:50 crc kubenswrapper[5002]: I0930 12:20:50.987542 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-p45pn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d0334cb8-cf38-4e50-80e8-2b81d8d46ae7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:49Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:49Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2nvm4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:49Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-p45pn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:50Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:51 crc kubenswrapper[5002]: I0930 12:20:51.005224 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-2lqq2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7b7df259-4156-484c-bedd-543ca42f2970\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy 
whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plu
gin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-2lqq2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2025-09-30T12:20:51Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:51 crc kubenswrapper[5002]: I0930 12:20:51.020502 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:51Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:51 crc kubenswrapper[5002]: I0930 12:20:51.031470 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97f2c4d225c4e4f2f04337915618c52ced7280fe45dc735fd9b1aca03f95f7f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:51Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:51 crc kubenswrapper[5002]: I0930 12:20:51.042406 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/7095aa7a-d067-4977-bdc5-3a45a52a6a39-env-overrides\") pod \"ovnkube-node-4pvsr\" (UID: \"7095aa7a-d067-4977-bdc5-3a45a52a6a39\") " pod="openshift-ovn-kubernetes/ovnkube-node-4pvsr" Sep 30 12:20:51 crc kubenswrapper[5002]: I0930 12:20:51.042555 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/7095aa7a-d067-4977-bdc5-3a45a52a6a39-host-kubelet\") pod \"ovnkube-node-4pvsr\" (UID: \"7095aa7a-d067-4977-bdc5-3a45a52a6a39\") " pod="openshift-ovn-kubernetes/ovnkube-node-4pvsr" Sep 30 12:20:51 crc kubenswrapper[5002]: I0930 12:20:51.042579 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/7095aa7a-d067-4977-bdc5-3a45a52a6a39-run-systemd\") pod \"ovnkube-node-4pvsr\" (UID: \"7095aa7a-d067-4977-bdc5-3a45a52a6a39\") " pod="openshift-ovn-kubernetes/ovnkube-node-4pvsr" Sep 30 12:20:51 crc kubenswrapper[5002]: I0930 12:20:51.042602 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: 
\"kubernetes.io/secret/7095aa7a-d067-4977-bdc5-3a45a52a6a39-ovn-node-metrics-cert\") pod \"ovnkube-node-4pvsr\" (UID: \"7095aa7a-d067-4977-bdc5-3a45a52a6a39\") " pod="openshift-ovn-kubernetes/ovnkube-node-4pvsr" Sep 30 12:20:51 crc kubenswrapper[5002]: I0930 12:20:51.042673 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/7095aa7a-d067-4977-bdc5-3a45a52a6a39-ovnkube-config\") pod \"ovnkube-node-4pvsr\" (UID: \"7095aa7a-d067-4977-bdc5-3a45a52a6a39\") " pod="openshift-ovn-kubernetes/ovnkube-node-4pvsr" Sep 30 12:20:51 crc kubenswrapper[5002]: I0930 12:20:51.042696 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5bntn\" (UniqueName: \"kubernetes.io/projected/7095aa7a-d067-4977-bdc5-3a45a52a6a39-kube-api-access-5bntn\") pod \"ovnkube-node-4pvsr\" (UID: \"7095aa7a-d067-4977-bdc5-3a45a52a6a39\") " pod="openshift-ovn-kubernetes/ovnkube-node-4pvsr" Sep 30 12:20:51 crc kubenswrapper[5002]: I0930 12:20:51.042722 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/7095aa7a-d067-4977-bdc5-3a45a52a6a39-run-openvswitch\") pod \"ovnkube-node-4pvsr\" (UID: \"7095aa7a-d067-4977-bdc5-3a45a52a6a39\") " pod="openshift-ovn-kubernetes/ovnkube-node-4pvsr" Sep 30 12:20:51 crc kubenswrapper[5002]: I0930 12:20:51.042809 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/7095aa7a-d067-4977-bdc5-3a45a52a6a39-run-ovn\") pod \"ovnkube-node-4pvsr\" (UID: \"7095aa7a-d067-4977-bdc5-3a45a52a6a39\") " pod="openshift-ovn-kubernetes/ovnkube-node-4pvsr" Sep 30 12:20:51 crc kubenswrapper[5002]: I0930 12:20:51.042846 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/7095aa7a-d067-4977-bdc5-3a45a52a6a39-node-log\") pod \"ovnkube-node-4pvsr\" (UID: \"7095aa7a-d067-4977-bdc5-3a45a52a6a39\") " pod="openshift-ovn-kubernetes/ovnkube-node-4pvsr" Sep 30 12:20:51 crc kubenswrapper[5002]: I0930 12:20:51.042895 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/7095aa7a-d067-4977-bdc5-3a45a52a6a39-host-slash\") pod \"ovnkube-node-4pvsr\" (UID: \"7095aa7a-d067-4977-bdc5-3a45a52a6a39\") " pod="openshift-ovn-kubernetes/ovnkube-node-4pvsr" Sep 30 12:20:51 crc kubenswrapper[5002]: I0930 12:20:51.042915 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/7095aa7a-d067-4977-bdc5-3a45a52a6a39-host-run-netns\") pod \"ovnkube-node-4pvsr\" (UID: \"7095aa7a-d067-4977-bdc5-3a45a52a6a39\") " pod="openshift-ovn-kubernetes/ovnkube-node-4pvsr" Sep 30 12:20:51 crc kubenswrapper[5002]: I0930 12:20:51.042937 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/7095aa7a-d067-4977-bdc5-3a45a52a6a39-host-cni-bin\") pod \"ovnkube-node-4pvsr\" (UID: \"7095aa7a-d067-4977-bdc5-3a45a52a6a39\") " pod="openshift-ovn-kubernetes/ovnkube-node-4pvsr" Sep 30 12:20:51 crc kubenswrapper[5002]: I0930 12:20:51.042974 5002 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/7095aa7a-d067-4977-bdc5-3a45a52a6a39-ovnkube-script-lib\") pod \"ovnkube-node-4pvsr\" (UID: \"7095aa7a-d067-4977-bdc5-3a45a52a6a39\") " pod="openshift-ovn-kubernetes/ovnkube-node-4pvsr" Sep 30 12:20:51 crc kubenswrapper[5002]: I0930 12:20:51.043011 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/7095aa7a-d067-4977-bdc5-3a45a52a6a39-var-lib-openvswitch\") pod \"ovnkube-node-4pvsr\" (UID: \"7095aa7a-d067-4977-bdc5-3a45a52a6a39\") " pod="openshift-ovn-kubernetes/ovnkube-node-4pvsr" Sep 30 12:20:51 crc kubenswrapper[5002]: I0930 12:20:51.043034 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/7095aa7a-d067-4977-bdc5-3a45a52a6a39-host-run-ovn-kubernetes\") pod \"ovnkube-node-4pvsr\" (UID: \"7095aa7a-d067-4977-bdc5-3a45a52a6a39\") " pod="openshift-ovn-kubernetes/ovnkube-node-4pvsr" Sep 30 12:20:51 crc kubenswrapper[5002]: I0930 12:20:51.043061 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/7095aa7a-d067-4977-bdc5-3a45a52a6a39-etc-openvswitch\") pod \"ovnkube-node-4pvsr\" (UID: \"7095aa7a-d067-4977-bdc5-3a45a52a6a39\") " pod="openshift-ovn-kubernetes/ovnkube-node-4pvsr" Sep 30 12:20:51 crc kubenswrapper[5002]: I0930 12:20:51.043107 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/7095aa7a-d067-4977-bdc5-3a45a52a6a39-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-4pvsr\" (UID: \"7095aa7a-d067-4977-bdc5-3a45a52a6a39\") " pod="openshift-ovn-kubernetes/ovnkube-node-4pvsr" Sep 30 12:20:51 crc kubenswrapper[5002]: I0930 12:20:51.043612 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/7095aa7a-d067-4977-bdc5-3a45a52a6a39-systemd-units\") pod \"ovnkube-node-4pvsr\" (UID: \"7095aa7a-d067-4977-bdc5-3a45a52a6a39\") " pod="openshift-ovn-kubernetes/ovnkube-node-4pvsr" Sep 30 12:20:51 crc kubenswrapper[5002]: I0930 12:20:51.043676 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/7095aa7a-d067-4977-bdc5-3a45a52a6a39-log-socket\") pod \"ovnkube-node-4pvsr\" (UID: \"7095aa7a-d067-4977-bdc5-3a45a52a6a39\") " pod="openshift-ovn-kubernetes/ovnkube-node-4pvsr" Sep 30 12:20:51 crc kubenswrapper[5002]: I0930 12:20:51.043737 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/7095aa7a-d067-4977-bdc5-3a45a52a6a39-host-cni-netd\") pod \"ovnkube-node-4pvsr\" (UID: \"7095aa7a-d067-4977-bdc5-3a45a52a6a39\") " pod="openshift-ovn-kubernetes/ovnkube-node-4pvsr" Sep 30 12:20:51 crc kubenswrapper[5002]: I0930 12:20:51.046182 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:51Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:51 crc kubenswrapper[5002]: I0930 12:20:51.066349 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1d4eefd-8292-45a1-af4b-4a4906cccd2f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21a70de7ac636795f52b3bdd2a2b938494cabcf80e1fb9b321a309f4719edf4e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://240fb1d0e11ae7b3459121ba32094ddfa79dc71a1180befebcaf594859a63d86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://91de7b5c3b52d03b05c151b66857b5af5f3b9d228f3f1af32a7f504225e1e739\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4cb1aca4e313653c2d83c0d4fb8dfab1e6f8c82
20c806cbcdcf154f2d9ab6e29\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ef59c1ec617a61effd91da12779f13866a50fe611d078330f0275e8d6ef1c27e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3a814062d9554255b768b26ad452946c64836707ac0e68c9e3f107dc587c0ccb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3a814062d9554255b768b26ad452946c64836707ac0e68c9e3f107dc587c0ccb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://74db2c969d2f9049fd8e8130a9fd50b312f3fce049ab0e814390dbff3b0cb596\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://74db2c969d2f9049fd8e8130a9fd50b312f3fce049ab0e814390dbff3b0cb596\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://a367821c9b4b8799fb789ca39d487c519925da11ebb80e94a0123e02cf78964e\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a367821c9b4b8799fb789ca39d487c519925da11ebb80e94a0123e02cf78964e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:26Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:51Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:51 crc kubenswrapper[5002]: I0930 12:20:51.080864 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1fe2fccc-790d-41b7-a322-0f99dbd9b3e2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a46e01897629261ad42fc3fa2cc7fdf56a2a020864a1088d9e58edf61aac296e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://017ebbf00a59d33f36e3515dd280bc0c4363da8b3e621339185ebdb3c8728148\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ee0dc371a43f3780e1178e29ac491c7a61786fccb37527dd2f21d800adbbdae\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://49c139b2c0ae558d108d47a8452c2a8348fc164761d468e1b9b98ff2c7407f20\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://49c139b2c0ae558d108d47a8452c2a8348fc164761d468e1b9b98ff2c7407f20\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"message\\\":\\\"le observer\\\\nW0930 12:20:46.369838 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0930 12:20:46.370037 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 12:20:46.371113 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1995022772/tls.crt::/tmp/serving-cert-1995022772/tls.key\\\\\\\"\\\\nI0930 12:20:46.549991 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0930 12:20:46.558658 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0930 12:20:46.558692 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0930 12:20:46.558723 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0930 12:20:46.558735 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0930 12:20:46.572632 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0930 12:20:46.572689 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 12:20:46.572707 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 12:20:46.572718 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0930 12:20:46.572725 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0930 12:20:46.572737 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0930 12:20:46.572744 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0930 12:20:46.573045 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0930 12:20:46.574462 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:40Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b99e73bb34b117cdb12e2475fc8e7d0ffdcb7fc44a000282e7776473eca209ea\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://24b1bc36a80f3925846e76f49c3347f1e2f14555096b3a514b06c6eb2e8fe02c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://24b1bc36a80f3925846e76f49c3347f1e2f14555096b3a514b06c6eb2e8fe02c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:51Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:51 crc kubenswrapper[5002]: I0930 12:20:51.094307 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://28d9568103a5257388af7a54e676246ab6ee732abd4516bd06e2cd7471f0ec08\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:51Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:51 crc kubenswrapper[5002]: I0930 12:20:51.108296 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"341a55c6-78d3-4fa2-8f47-b56fd41fa1c1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbwzb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbwzb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:50Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-ncbb5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:51Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:51 crc kubenswrapper[5002]: I0930 12:20:51.122156 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://30f55f9a0bcab13420179c62f63cd5e785afe964ac86286450fcd91d7ca0562e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://306dd7840789c5fe2565d819a744a57e330cb1fcc1ab1ca4b3c5ebcfd0361d3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:51Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:51 crc kubenswrapper[5002]: I0930 12:20:51.135356 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:51Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:51 crc kubenswrapper[5002]: I0930 12:20:51.144619 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/7095aa7a-d067-4977-bdc5-3a45a52a6a39-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-4pvsr\" (UID: \"7095aa7a-d067-4977-bdc5-3a45a52a6a39\") " pod="openshift-ovn-kubernetes/ovnkube-node-4pvsr" Sep 30 12:20:51 crc kubenswrapper[5002]: I0930 12:20:51.144654 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/7095aa7a-d067-4977-bdc5-3a45a52a6a39-host-cni-netd\") pod \"ovnkube-node-4pvsr\" (UID: \"7095aa7a-d067-4977-bdc5-3a45a52a6a39\") " pod="openshift-ovn-kubernetes/ovnkube-node-4pvsr" Sep 30 12:20:51 crc kubenswrapper[5002]: I0930 12:20:51.144684 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/7095aa7a-d067-4977-bdc5-3a45a52a6a39-systemd-units\") pod \"ovnkube-node-4pvsr\" (UID: \"7095aa7a-d067-4977-bdc5-3a45a52a6a39\") " pod="openshift-ovn-kubernetes/ovnkube-node-4pvsr" Sep 30 12:20:51 crc kubenswrapper[5002]: I0930 12:20:51.144701 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" 
(UniqueName: \"kubernetes.io/host-path/7095aa7a-d067-4977-bdc5-3a45a52a6a39-log-socket\") pod \"ovnkube-node-4pvsr\" (UID: \"7095aa7a-d067-4977-bdc5-3a45a52a6a39\") " pod="openshift-ovn-kubernetes/ovnkube-node-4pvsr" Sep 30 12:20:51 crc kubenswrapper[5002]: I0930 12:20:51.144719 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/7095aa7a-d067-4977-bdc5-3a45a52a6a39-env-overrides\") pod \"ovnkube-node-4pvsr\" (UID: \"7095aa7a-d067-4977-bdc5-3a45a52a6a39\") " pod="openshift-ovn-kubernetes/ovnkube-node-4pvsr" Sep 30 12:20:51 crc kubenswrapper[5002]: I0930 12:20:51.144733 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/7095aa7a-d067-4977-bdc5-3a45a52a6a39-host-kubelet\") pod \"ovnkube-node-4pvsr\" (UID: \"7095aa7a-d067-4977-bdc5-3a45a52a6a39\") " pod="openshift-ovn-kubernetes/ovnkube-node-4pvsr" Sep 30 12:20:51 crc kubenswrapper[5002]: I0930 12:20:51.144746 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/7095aa7a-d067-4977-bdc5-3a45a52a6a39-run-systemd\") pod \"ovnkube-node-4pvsr\" (UID: \"7095aa7a-d067-4977-bdc5-3a45a52a6a39\") " pod="openshift-ovn-kubernetes/ovnkube-node-4pvsr" Sep 30 12:20:51 crc kubenswrapper[5002]: I0930 12:20:51.144759 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/7095aa7a-d067-4977-bdc5-3a45a52a6a39-ovn-node-metrics-cert\") pod \"ovnkube-node-4pvsr\" (UID: \"7095aa7a-d067-4977-bdc5-3a45a52a6a39\") " pod="openshift-ovn-kubernetes/ovnkube-node-4pvsr" Sep 30 12:20:51 crc kubenswrapper[5002]: I0930 12:20:51.144779 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/7095aa7a-d067-4977-bdc5-3a45a52a6a39-ovnkube-config\") pod \"ovnkube-node-4pvsr\" (UID: \"7095aa7a-d067-4977-bdc5-3a45a52a6a39\") " pod="openshift-ovn-kubernetes/ovnkube-node-4pvsr" Sep 30 12:20:51 crc kubenswrapper[5002]: I0930 12:20:51.144795 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5bntn\" (UniqueName: \"kubernetes.io/projected/7095aa7a-d067-4977-bdc5-3a45a52a6a39-kube-api-access-5bntn\") pod \"ovnkube-node-4pvsr\" (UID: \"7095aa7a-d067-4977-bdc5-3a45a52a6a39\") " pod="openshift-ovn-kubernetes/ovnkube-node-4pvsr" Sep 30 12:20:51 crc kubenswrapper[5002]: I0930 12:20:51.144804 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/7095aa7a-d067-4977-bdc5-3a45a52a6a39-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-4pvsr\" (UID: \"7095aa7a-d067-4977-bdc5-3a45a52a6a39\") " pod="openshift-ovn-kubernetes/ovnkube-node-4pvsr" Sep 30 12:20:51 crc kubenswrapper[5002]: I0930 12:20:51.144810 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/7095aa7a-d067-4977-bdc5-3a45a52a6a39-run-openvswitch\") pod \"ovnkube-node-4pvsr\" (UID: \"7095aa7a-d067-4977-bdc5-3a45a52a6a39\") " pod="openshift-ovn-kubernetes/ovnkube-node-4pvsr" Sep 30 12:20:51 crc kubenswrapper[5002]: I0930 12:20:51.144858 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: 
\"kubernetes.io/host-path/7095aa7a-d067-4977-bdc5-3a45a52a6a39-run-ovn\") pod \"ovnkube-node-4pvsr\" (UID: \"7095aa7a-d067-4977-bdc5-3a45a52a6a39\") " pod="openshift-ovn-kubernetes/ovnkube-node-4pvsr" Sep 30 12:20:51 crc kubenswrapper[5002]: I0930 12:20:51.144885 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/7095aa7a-d067-4977-bdc5-3a45a52a6a39-systemd-units\") pod \"ovnkube-node-4pvsr\" (UID: \"7095aa7a-d067-4977-bdc5-3a45a52a6a39\") " pod="openshift-ovn-kubernetes/ovnkube-node-4pvsr" Sep 30 12:20:51 crc kubenswrapper[5002]: I0930 12:20:51.144836 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/7095aa7a-d067-4977-bdc5-3a45a52a6a39-run-openvswitch\") pod \"ovnkube-node-4pvsr\" (UID: \"7095aa7a-d067-4977-bdc5-3a45a52a6a39\") " pod="openshift-ovn-kubernetes/ovnkube-node-4pvsr" Sep 30 12:20:51 crc kubenswrapper[5002]: I0930 12:20:51.144905 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/7095aa7a-d067-4977-bdc5-3a45a52a6a39-node-log\") pod \"ovnkube-node-4pvsr\" (UID: \"7095aa7a-d067-4977-bdc5-3a45a52a6a39\") " pod="openshift-ovn-kubernetes/ovnkube-node-4pvsr" Sep 30 12:20:51 crc kubenswrapper[5002]: I0930 12:20:51.144921 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/7095aa7a-d067-4977-bdc5-3a45a52a6a39-run-ovn\") pod \"ovnkube-node-4pvsr\" (UID: \"7095aa7a-d067-4977-bdc5-3a45a52a6a39\") " pod="openshift-ovn-kubernetes/ovnkube-node-4pvsr" Sep 30 12:20:51 crc kubenswrapper[5002]: I0930 12:20:51.144928 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/7095aa7a-d067-4977-bdc5-3a45a52a6a39-log-socket\") pod \"ovnkube-node-4pvsr\" (UID: \"7095aa7a-d067-4977-bdc5-3a45a52a6a39\") " pod="openshift-ovn-kubernetes/ovnkube-node-4pvsr" Sep 30 12:20:51 crc kubenswrapper[5002]: I0930 12:20:51.144771 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/7095aa7a-d067-4977-bdc5-3a45a52a6a39-host-cni-netd\") pod \"ovnkube-node-4pvsr\" (UID: \"7095aa7a-d067-4977-bdc5-3a45a52a6a39\") " pod="openshift-ovn-kubernetes/ovnkube-node-4pvsr" Sep 30 12:20:51 crc kubenswrapper[5002]: I0930 12:20:51.145497 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/7095aa7a-d067-4977-bdc5-3a45a52a6a39-ovnkube-config\") pod \"ovnkube-node-4pvsr\" (UID: \"7095aa7a-d067-4977-bdc5-3a45a52a6a39\") " pod="openshift-ovn-kubernetes/ovnkube-node-4pvsr" Sep 30 12:20:51 crc kubenswrapper[5002]: I0930 12:20:51.144885 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/7095aa7a-d067-4977-bdc5-3a45a52a6a39-node-log\") pod \"ovnkube-node-4pvsr\" (UID: \"7095aa7a-d067-4977-bdc5-3a45a52a6a39\") " pod="openshift-ovn-kubernetes/ovnkube-node-4pvsr" Sep 30 12:20:51 crc kubenswrapper[5002]: I0930 12:20:51.145561 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/7095aa7a-d067-4977-bdc5-3a45a52a6a39-host-kubelet\") pod \"ovnkube-node-4pvsr\" (UID: \"7095aa7a-d067-4977-bdc5-3a45a52a6a39\") " pod="openshift-ovn-kubernetes/ovnkube-node-4pvsr" Sep 30 
12:20:51 crc kubenswrapper[5002]: I0930 12:20:51.145541 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/7095aa7a-d067-4977-bdc5-3a45a52a6a39-run-systemd\") pod \"ovnkube-node-4pvsr\" (UID: \"7095aa7a-d067-4977-bdc5-3a45a52a6a39\") " pod="openshift-ovn-kubernetes/ovnkube-node-4pvsr" Sep 30 12:20:51 crc kubenswrapper[5002]: I0930 12:20:51.145795 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/7095aa7a-d067-4977-bdc5-3a45a52a6a39-host-slash\") pod \"ovnkube-node-4pvsr\" (UID: \"7095aa7a-d067-4977-bdc5-3a45a52a6a39\") " pod="openshift-ovn-kubernetes/ovnkube-node-4pvsr" Sep 30 12:20:51 crc kubenswrapper[5002]: I0930 12:20:51.145848 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/7095aa7a-d067-4977-bdc5-3a45a52a6a39-host-slash\") pod \"ovnkube-node-4pvsr\" (UID: \"7095aa7a-d067-4977-bdc5-3a45a52a6a39\") " pod="openshift-ovn-kubernetes/ovnkube-node-4pvsr" Sep 30 12:20:51 crc kubenswrapper[5002]: I0930 12:20:51.145851 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/7095aa7a-d067-4977-bdc5-3a45a52a6a39-host-run-netns\") pod \"ovnkube-node-4pvsr\" (UID: \"7095aa7a-d067-4977-bdc5-3a45a52a6a39\") " pod="openshift-ovn-kubernetes/ovnkube-node-4pvsr" Sep 30 12:20:51 crc kubenswrapper[5002]: I0930 12:20:51.145819 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/7095aa7a-d067-4977-bdc5-3a45a52a6a39-env-overrides\") pod \"ovnkube-node-4pvsr\" (UID: \"7095aa7a-d067-4977-bdc5-3a45a52a6a39\") " pod="openshift-ovn-kubernetes/ovnkube-node-4pvsr" Sep 30 12:20:51 crc kubenswrapper[5002]: I0930 12:20:51.145888 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/7095aa7a-d067-4977-bdc5-3a45a52a6a39-host-cni-bin\") pod \"ovnkube-node-4pvsr\" (UID: \"7095aa7a-d067-4977-bdc5-3a45a52a6a39\") " pod="openshift-ovn-kubernetes/ovnkube-node-4pvsr" Sep 30 12:20:51 crc kubenswrapper[5002]: I0930 12:20:51.145943 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/7095aa7a-d067-4977-bdc5-3a45a52a6a39-ovnkube-script-lib\") pod \"ovnkube-node-4pvsr\" (UID: \"7095aa7a-d067-4977-bdc5-3a45a52a6a39\") " pod="openshift-ovn-kubernetes/ovnkube-node-4pvsr" Sep 30 12:20:51 crc kubenswrapper[5002]: I0930 12:20:51.145964 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/7095aa7a-d067-4977-bdc5-3a45a52a6a39-host-run-netns\") pod \"ovnkube-node-4pvsr\" (UID: \"7095aa7a-d067-4977-bdc5-3a45a52a6a39\") " pod="openshift-ovn-kubernetes/ovnkube-node-4pvsr" Sep 30 12:20:51 crc kubenswrapper[5002]: I0930 12:20:51.145982 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/7095aa7a-d067-4977-bdc5-3a45a52a6a39-var-lib-openvswitch\") pod \"ovnkube-node-4pvsr\" (UID: \"7095aa7a-d067-4977-bdc5-3a45a52a6a39\") " pod="openshift-ovn-kubernetes/ovnkube-node-4pvsr" Sep 30 12:20:51 crc kubenswrapper[5002]: I0930 12:20:51.145998 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/7095aa7a-d067-4977-bdc5-3a45a52a6a39-host-cni-bin\") pod \"ovnkube-node-4pvsr\" (UID: \"7095aa7a-d067-4977-bdc5-3a45a52a6a39\") " pod="openshift-ovn-kubernetes/ovnkube-node-4pvsr" Sep 30 12:20:51 crc kubenswrapper[5002]: I0930 12:20:51.146021 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/7095aa7a-d067-4977-bdc5-3a45a52a6a39-host-run-ovn-kubernetes\") pod \"ovnkube-node-4pvsr\" (UID: \"7095aa7a-d067-4977-bdc5-3a45a52a6a39\") " pod="openshift-ovn-kubernetes/ovnkube-node-4pvsr" Sep 30 12:20:51 crc kubenswrapper[5002]: I0930 12:20:51.146062 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/7095aa7a-d067-4977-bdc5-3a45a52a6a39-etc-openvswitch\") pod \"ovnkube-node-4pvsr\" (UID: \"7095aa7a-d067-4977-bdc5-3a45a52a6a39\") " pod="openshift-ovn-kubernetes/ovnkube-node-4pvsr" Sep 30 12:20:51 crc kubenswrapper[5002]: I0930 12:20:51.146187 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/7095aa7a-d067-4977-bdc5-3a45a52a6a39-etc-openvswitch\") pod \"ovnkube-node-4pvsr\" (UID: \"7095aa7a-d067-4977-bdc5-3a45a52a6a39\") " pod="openshift-ovn-kubernetes/ovnkube-node-4pvsr" Sep 30 12:20:51 crc kubenswrapper[5002]: I0930 12:20:51.146235 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/7095aa7a-d067-4977-bdc5-3a45a52a6a39-var-lib-openvswitch\") pod \"ovnkube-node-4pvsr\" (UID: \"7095aa7a-d067-4977-bdc5-3a45a52a6a39\") " pod="openshift-ovn-kubernetes/ovnkube-node-4pvsr" Sep 30 12:20:51 crc kubenswrapper[5002]: I0930 12:20:51.146271 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/7095aa7a-d067-4977-bdc5-3a45a52a6a39-host-run-ovn-kubernetes\") pod \"ovnkube-node-4pvsr\" (UID: \"7095aa7a-d067-4977-bdc5-3a45a52a6a39\") " pod="openshift-ovn-kubernetes/ovnkube-node-4pvsr" Sep 30 12:20:51 crc kubenswrapper[5002]: I0930 12:20:51.146446 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/7095aa7a-d067-4977-bdc5-3a45a52a6a39-ovnkube-script-lib\") pod \"ovnkube-node-4pvsr\" (UID: \"7095aa7a-d067-4977-bdc5-3a45a52a6a39\") " pod="openshift-ovn-kubernetes/ovnkube-node-4pvsr" Sep 30 12:20:51 crc kubenswrapper[5002]: I0930 12:20:51.149272 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/7095aa7a-d067-4977-bdc5-3a45a52a6a39-ovn-node-metrics-cert\") pod \"ovnkube-node-4pvsr\" (UID: \"7095aa7a-d067-4977-bdc5-3a45a52a6a39\") " pod="openshift-ovn-kubernetes/ovnkube-node-4pvsr" Sep 30 12:20:51 crc kubenswrapper[5002]: I0930 12:20:51.151917 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-ttfn8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2bd9f18d-bfb3-4bd7-9a87-242029cd3200\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lvf2n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-ttfn8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": 
failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:51Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:51 crc kubenswrapper[5002]: I0930 12:20:51.175331 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4pvsr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7095aa7a-d067-4977-bdc5-3a45a52a6a39\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:50Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4pvsr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:51Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:51 crc kubenswrapper[5002]: I0930 12:20:51.181053 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5bntn\" (UniqueName: \"kubernetes.io/projected/7095aa7a-d067-4977-bdc5-3a45a52a6a39-kube-api-access-5bntn\") pod \"ovnkube-node-4pvsr\" (UID: \"7095aa7a-d067-4977-bdc5-3a45a52a6a39\") " pod="openshift-ovn-kubernetes/ovnkube-node-4pvsr" Sep 30 
12:20:51 crc kubenswrapper[5002]: I0930 12:20:51.189992 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Sep 30 12:20:51 crc kubenswrapper[5002]: I0930 12:20:51.193349 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Sep 30 12:20:51 crc kubenswrapper[5002]: I0930 12:20:51.193647 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://30f55f9a0bcab13420179c62f63cd5e785afe964ac86286450fcd91d7ca0562e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://306dd7840789c5fe2565d819a744a57e330cb1fcc1ab1ca4b3c5ebcfd0361d3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: 
failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:51Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:51 crc kubenswrapper[5002]: I0930 12:20:51.197904 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/kube-controller-manager-crc"] Sep 30 12:20:51 crc kubenswrapper[5002]: I0930 12:20:51.208391 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:51Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:51 crc kubenswrapper[5002]: I0930 12:20:51.219787 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-ttfn8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2bd9f18d-bfb3-4bd7-9a87-242029cd3200\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0809a587f4b73c4a5e632e8db2cc0d4a957eded6da9a771cf0b53e4862de54e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mo
untPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lvf2n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-ttfn8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:51Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:51 crc kubenswrapper[5002]: I0930 12:20:51.229850 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"341a55c6-78d3-4fa2-8f47-b56fd41fa1c1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7bc40360d8d17f580c7ded1e33762aa443998e4202892e2f2effc08b6659a4a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbwzb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-
o://6868404a7219f8dbbc4e7541e5f80402c169808daea81b5f94546472cb8e70a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbwzb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:50Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-ncbb5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:51Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:51 crc kubenswrapper[5002]: I0930 12:20:51.244021 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-2lqq2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7b7df259-4156-484c-bedd-543ca42f2970\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6865c45580d6dc6acaf6dede9483213728b45f602a6df3a6d9264bbec520db66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"}
,{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-2lqq2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:51Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:51 crc kubenswrapper[5002]: I0930 12:20:51.257253 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:51Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:51 crc kubenswrapper[5002]: I0930 12:20:51.267215 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-p45pn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d0334cb8-cf38-4e50-80e8-2b81d8d46ae7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca035599120eed38b0abc1893076e4d1ecab6b1ce53605f53ff30fa2782b63de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2nvm4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:49Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-p45pn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:51Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:51 crc kubenswrapper[5002]: I0930 12:20:51.278204 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:51Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:51 crc kubenswrapper[5002]: I0930 12:20:51.297172 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-4pvsr" Sep 30 12:20:51 crc kubenswrapper[5002]: I0930 12:20:51.302311 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1d4eefd-8292-45a1-af4b-4a4906cccd2f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21a70de7ac636795f52b3bdd2a2b938494cabcf80e1fb9b321a309f4719edf4e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://240fb1d0e11ae7b3459121ba32094ddfa79dc71a1180befebcaf594859a63d86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://91de7b5c3b52d03b05c151b66857b5af5f3b9d228f3f1af32a7f504225e1e739\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4cb1aca4e313653c2d83c0d4fb8dfab1e6f8c82
20c806cbcdcf154f2d9ab6e29\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ef59c1ec617a61effd91da12779f13866a50fe611d078330f0275e8d6ef1c27e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3a814062d9554255b768b26ad452946c64836707ac0e68c9e3f107dc587c0ccb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3a814062d9554255b768b26ad452946c64836707ac0e68c9e3f107dc587c0ccb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://74db2c969d2f9049fd8e8130a9fd50b312f3fce049ab0e814390dbff3b0cb596\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://74db2c969d2f9049fd8e8130a9fd50b312f3fce049ab0e814390dbff3b0cb596\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://a367821c9b4b8799fb789ca39d487c519925da11ebb80e94a0123e02cf78964e\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a367821c9b4b8799fb789ca39d487c519925da11ebb80e94a0123e02cf78964e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:26Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:51Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:51 crc kubenswrapper[5002]: W0930 12:20:51.314009 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7095aa7a_d067_4977_bdc5_3a45a52a6a39.slice/crio-9e7540d9f3130e8c7b820a75697164f7033839bdc4652ef7bb80df91032bfff5 WatchSource:0}: Error finding container 9e7540d9f3130e8c7b820a75697164f7033839bdc4652ef7bb80df91032bfff5: Status 404 returned error can't find the container with id 9e7540d9f3130e8c7b820a75697164f7033839bdc4652ef7bb80df91032bfff5 Sep 30 12:20:51 crc kubenswrapper[5002]: I0930 12:20:51.323126 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1fe2fccc-790d-41b7-a322-0f99dbd9b3e2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a46e01897629261ad42fc3fa2cc7fdf56a2a020864a1088d9e58edf61aac296e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://017ebbf00a59d33f36e3515dd280bc0c4363da8b3e621339185ebdb3c8728148\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ee0dc371a43f3780e1178e29ac491c7a61786fccb37527dd2f21d800adbbdae\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://49c139b2c0ae558d108d47a8452c2a8348fc164761d468e1b9b98ff2c7407f20\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://49c139b2c0ae558d108d47a8452c2a8348fc164761d468e1b9b98ff2c7407f20\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"message\\\":\\\"le observer\\\\nW0930 12:20:46.369838 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0930 12:20:46.370037 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 12:20:46.371113 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1995022772/tls.crt::/tmp/serving-cert-1995022772/tls.key\\\\\\\"\\\\nI0930 12:20:46.549991 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0930 12:20:46.558658 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0930 12:20:46.558692 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0930 12:20:46.558723 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0930 12:20:46.558735 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0930 12:20:46.572632 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0930 12:20:46.572689 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 12:20:46.572707 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 12:20:46.572718 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0930 12:20:46.572725 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0930 12:20:46.572737 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0930 12:20:46.572744 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0930 12:20:46.573045 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0930 12:20:46.574462 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:40Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b99e73bb34b117cdb12e2475fc8e7d0ffdcb7fc44a000282e7776473eca209ea\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://24b1bc36a80f3925846e76f49c3347f1e2f14555096b3a514b06c6eb2e8fe02c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://24b1bc36a80f3925846e76f49c3347f1e2f14555096b3a514b06c6eb2e8fe02c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:51Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:51 crc kubenswrapper[5002]: I0930 12:20:51.337026 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://28d9568103a5257388af7a54e676246ab6ee732abd4516bd06e2cd7471f0ec08\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:51Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:51 crc kubenswrapper[5002]: I0930 12:20:51.350103 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97f2c4d225c4e4f2f04337915618c52ced7280fe45dc735fd9b1aca03f95f7f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:51Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:51 crc kubenswrapper[5002]: I0930 12:20:51.396383 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://28d9568103a5257388af7a54e676246ab6ee732abd4516bd06e2cd7471f0ec08\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:51Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:51 crc kubenswrapper[5002]: I0930 12:20:51.422114 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97f2c4d225c4e4f2f04337915618c52ced7280fe45dc735fd9b1aca03f95f7f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:51Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:51 crc kubenswrapper[5002]: I0930 12:20:51.444609 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:51Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:51 crc kubenswrapper[5002]: I0930 12:20:51.466875 5002 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 30 12:20:51 crc kubenswrapper[5002]: I0930 12:20:51.467633 5002 scope.go:117] "RemoveContainer" containerID="49c139b2c0ae558d108d47a8452c2a8348fc164761d468e1b9b98ff2c7407f20" Sep 30 12:20:51 crc kubenswrapper[5002]: E0930 12:20:51.467831 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-apiserver-check-endpoints\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\"" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" Sep 30 12:20:51 crc kubenswrapper[5002]: I0930 12:20:51.469382 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1d4eefd-8292-45a1-af4b-4a4906cccd2f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21a70de7ac636795f52b3bdd2a2b938494cabcf80e1fb9b321a309f4719edf4e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://240fb1d0e11ae7b3459121ba32094ddfa79dc71a1180befebcaf594859a63d86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://91de7b5c3b52d03b05c151b66857b5af5f3b9d228f3f1af32a7f504225e1e739\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4cb1aca4e313653c2d83c0d4fb8dfab1e6f8c82
20c806cbcdcf154f2d9ab6e29\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ef59c1ec617a61effd91da12779f13866a50fe611d078330f0275e8d6ef1c27e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3a814062d9554255b768b26ad452946c64836707ac0e68c9e3f107dc587c0ccb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3a814062d9554255b768b26ad452946c64836707ac0e68c9e3f107dc587c0ccb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://74db2c969d2f9049fd8e8130a9fd50b312f3fce049ab0e814390dbff3b0cb596\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://74db2c969d2f9049fd8e8130a9fd50b312f3fce049ab0e814390dbff3b0cb596\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://a367821c9b4b8799fb789ca39d487c519925da11ebb80e94a0123e02cf78964e\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a367821c9b4b8799fb789ca39d487c519925da11ebb80e94a0123e02cf78964e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:26Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:51Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:51 crc kubenswrapper[5002]: I0930 12:20:51.482756 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1fe2fccc-790d-41b7-a322-0f99dbd9b3e2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a46e01897629261ad42fc3fa2cc7fdf56a2a020864a1088d9e58edf61aac296e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://017ebbf00a59d33f36e3515dd280bc0c4363da8b3e621339185ebdb3c8728148\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ee0dc371a43f3780e1178e29ac491c7a61786fccb37527dd2f21d800adbbdae\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://49c139b2c0ae558d108d47a8452c2a8348fc164761d468e1b9b98ff2c7407f20\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://49c139b2c0ae558d108d47a8452c2a8348fc164761d468e1b9b98ff2c7407f20\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"message\\\":\\\"le observer\\\\nW0930 12:20:46.369838 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0930 12:20:46.370037 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 12:20:46.371113 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1995022772/tls.crt::/tmp/serving-cert-1995022772/tls.key\\\\\\\"\\\\nI0930 12:20:46.549991 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0930 12:20:46.558658 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0930 12:20:46.558692 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0930 12:20:46.558723 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0930 12:20:46.558735 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0930 12:20:46.572632 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0930 12:20:46.572689 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 12:20:46.572707 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 12:20:46.572718 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0930 12:20:46.572725 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0930 12:20:46.572737 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0930 12:20:46.572744 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0930 12:20:46.573045 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0930 12:20:46.574462 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:40Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b99e73bb34b117cdb12e2475fc8e7d0ffdcb7fc44a000282e7776473eca209ea\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://24b1bc36a80f3925846e76f49c3347f1e2f14555096b3a514b06c6eb2e8fe02c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://24b1bc36a80f3925846e76f49c3347f1e2f14555096b3a514b06c6eb2e8fe02c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:51Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:51 crc kubenswrapper[5002]: I0930 12:20:51.494087 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"90d5183f-90e1-421f-bd64-fd7f05605586\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d2e0f074c0f31a5432cea24faa4f95c276f59cdf48bca8ceecd065b4cdff5d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://902cc950dc462d86c9de62cac5d04e99aa3240952f43e8a5f07884e70ff84b56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://047c48f1e175cddc7dc981a8d9027783f4a90c21cb444050771de50fb02e7a9e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f45ac789dc45e907d68fe54ec7bc2ea72dbcc5f46cd55c113c014203d5e709d9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:51Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:51 crc kubenswrapper[5002]: I0930 12:20:51.505251 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-ttfn8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2bd9f18d-bfb3-4bd7-9a87-242029cd3200\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0809a587f4b73c4a5e632e8db2cc0d4a957eded6da9a771cf0b53e4862de54e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run
/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lvf2n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-ttfn8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:51Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:51 crc kubenswrapper[5002]: I0930 12:20:51.514667 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"341a55c6-78d3-4fa2-8f47-b56fd41fa1c1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7bc40360d8d17f580c7ded1e33762aa443998e4202892e2f2effc08b6659a4a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbwzb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6868404a7219f8dbbc4e7541e5f80402c169808daea81b5f94546472cb8e70a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbwzb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:50Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-ncbb5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:51Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:51 crc kubenswrapper[5002]: I0930 12:20:51.531427 5002 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4pvsr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7095aa7a-d067-4977-bdc5-3a45a52a6a39\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\
\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"re
cursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482
919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:50Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4pvsr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:51Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:51 crc kubenswrapper[5002]: I0930 12:20:51.542376 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://30f55f9a0bcab13420179c62f63cd5e785afe964ac86286450fcd91d7ca0562e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://306dd7840789c5fe2565d819a744a57e330cb1fcc1ab1ca4b3c5ebcfd0361d3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/web
hook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:51Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:51 crc kubenswrapper[5002]: I0930 12:20:51.552934 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:51Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:51 crc kubenswrapper[5002]: I0930 12:20:51.564121 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:51Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:51 crc kubenswrapper[5002]: I0930 12:20:51.572517 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-p45pn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d0334cb8-cf38-4e50-80e8-2b81d8d46ae7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca035599120eed38b0abc1893076e4d1ecab6b1ce53605f53ff30fa2782b63de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2nvm4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:49Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-p45pn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-09-30T12:20:51Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:51 crc kubenswrapper[5002]: I0930 12:20:51.584360 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-2lqq2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7b7df259-4156-484c-bedd-543ca42f2970\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6865c45580d6dc6acaf6dede9483213728b45f602a6df3a6d9264bbec520db66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\
"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"r
eadOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-2lqq2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:51Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:51 crc kubenswrapper[5002]: I0930 12:20:51.969149 5002 generic.go:334] "Generic (PLEG): container finished" podID="7b7df259-4156-484c-bedd-543ca42f2970" containerID="6865c45580d6dc6acaf6dede9483213728b45f602a6df3a6d9264bbec520db66" exitCode=0 Sep 30 12:20:51 crc kubenswrapper[5002]: I0930 12:20:51.969196 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-2lqq2" event={"ID":"7b7df259-4156-484c-bedd-543ca42f2970","Type":"ContainerDied","Data":"6865c45580d6dc6acaf6dede9483213728b45f602a6df3a6d9264bbec520db66"} Sep 30 12:20:51 crc kubenswrapper[5002]: I0930 12:20:51.976953 5002 generic.go:334] "Generic (PLEG): container finished" podID="7095aa7a-d067-4977-bdc5-3a45a52a6a39" containerID="29ad21e0858ff157cc785f60f949fb08082938850870cc2c5198447f1e1d9253" exitCode=0 Sep 30 12:20:51 crc kubenswrapper[5002]: I0930 12:20:51.977183 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4pvsr" event={"ID":"7095aa7a-d067-4977-bdc5-3a45a52a6a39","Type":"ContainerDied","Data":"29ad21e0858ff157cc785f60f949fb08082938850870cc2c5198447f1e1d9253"} Sep 30 12:20:51 crc kubenswrapper[5002]: I0930 12:20:51.977267 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4pvsr" event={"ID":"7095aa7a-d067-4977-bdc5-3a45a52a6a39","Type":"ContainerStarted","Data":"9e7540d9f3130e8c7b820a75697164f7033839bdc4652ef7bb80df91032bfff5"} Sep 30 12:20:51 crc kubenswrapper[5002]: I0930 12:20:51.987234 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:51Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:51 crc kubenswrapper[5002]: I0930 12:20:51.997354 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-p45pn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d0334cb8-cf38-4e50-80e8-2b81d8d46ae7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca035599120eed38b0abc1893076e4d1ecab6b1ce53605f53ff30fa2782b63de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2nvm4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:49Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-p45pn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:51Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:52 crc kubenswrapper[5002]: I0930 12:20:52.015315 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-2lqq2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7b7df259-4156-484c-bedd-543ca42f2970\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy 
whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6865c45580d6dc6acaf6dede9483213728b45f602a6df3a6d9264bbec520db66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6865c45580d6dc6acaf6dede9483213728b45f602a6df3a6d9264bbec520db66\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\
\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"po
dIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-2lqq2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:52Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:52 crc kubenswrapper[5002]: I0930 12:20:52.033300 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1d4eefd-8292-45a1-af4b-4a4906cccd2f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21a70de7ac636795f52b3bdd2a2b938494cabcf80e1fb9b321a309f4719edf4e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://240fb1d0e11ae7b3459121ba32094ddfa79dc71a1180befebcaf594859a63d86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://91de7b5c3b52d03b05c151b66857b5af5f3b9d228f3f1af32a7f504225e1e739\\\",\\\"image\\\":\\\"quay.io/openshift-r
elease-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4cb1aca4e313653c2d83c0d4fb8dfab1e6f8c8220c806cbcdcf154f2d9ab6e29\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ef59c1ec617a61effd91da12779f13866a50fe611d078330f0275e8d6ef1c27e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3a814062d9554255b768b26ad452946c64836707ac0e68c9e3f107dc587c0ccb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3a814062d9554255b768b26ad452946c64836707ac0e68c9e3f107dc587c0ccb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://74db2c969d2f9049fd8e8130a9fd50b312f3fce049ab0e814390dbff3b0cb596\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646f
b68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://74db2c969d2f9049fd8e8130a9fd50b312f3fce049ab0e814390dbff3b0cb596\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://a367821c9b4b8799fb789ca39d487c519925da11ebb80e94a0123e02cf78964e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a367821c9b4b8799fb789ca39d487c519925da11ebb80e94a0123e02cf78964e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:26Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:52Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:52 crc kubenswrapper[5002]: I0930 12:20:52.049896 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1fe2fccc-790d-41b7-a322-0f99dbd9b3e2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a46e01897629261ad42fc3fa2cc7fdf56a2a020864a1088d9e58edf61aac296e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://017ebbf00a59d33f36e3515dd280bc0c4363da8b3e621339185ebdb3c8728148\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ee0dc371a43f3780e1178e29ac491c7a61786fccb37527dd2f21d800adbbdae\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://49c139b2c0ae558d108d47a8452c2a8348fc164761d468e1b9b98ff2c7407f20\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://49c139b2c0ae558d108d47a8452c2a8348fc164761d468e1b9b98ff2c7407f20\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"message\\\":\\\"le observer\\\\nW0930 12:20:46.369838 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0930 12:20:46.370037 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 12:20:46.371113 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1995022772/tls.crt::/tmp/serving-cert-1995022772/tls.key\\\\\\\"\\\\nI0930 12:20:46.549991 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0930 12:20:46.558658 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0930 12:20:46.558692 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0930 12:20:46.558723 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0930 12:20:46.558735 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0930 12:20:46.572632 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0930 12:20:46.572689 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 12:20:46.572707 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 12:20:46.572718 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0930 12:20:46.572725 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0930 12:20:46.572737 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0930 12:20:46.572744 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0930 12:20:46.573045 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0930 12:20:46.574462 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:40Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b99e73bb34b117cdb12e2475fc8e7d0ffdcb7fc44a000282e7776473eca209ea\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://24b1bc36a80f3925846e76f49c3347f1e2f14555096b3a514b06c6eb2e8fe02c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://24b1bc36a80f3925846e76f49c3347f1e2f14555096b3a514b06c6eb2e8fe02c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:52Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:52 crc kubenswrapper[5002]: I0930 12:20:52.061366 5002 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 12:20:52 crc kubenswrapper[5002]: I0930 12:20:52.063353 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:20:52 crc kubenswrapper[5002]: I0930 12:20:52.063384 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:20:52 crc kubenswrapper[5002]: I0930 12:20:52.063393 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:20:52 crc kubenswrapper[5002]: I0930 12:20:52.063515 5002 kubelet_node_status.go:76] "Attempting to register node" node="crc" Sep 30 12:20:52 crc kubenswrapper[5002]: I0930 12:20:52.063836 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" 
err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://28d9568103a5257388af7a54e676246ab6ee732abd4516bd06e2cd7471f0ec08\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:52Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:52 crc kubenswrapper[5002]: I0930 12:20:52.070878 5002 kubelet_node_status.go:115] "Node was previously registered" node="crc" Sep 30 12:20:52 crc kubenswrapper[5002]: I0930 12:20:52.071249 5002 kubelet_node_status.go:79] "Successfully registered node" node="crc" Sep 30 12:20:52 crc kubenswrapper[5002]: I0930 12:20:52.072530 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:20:52 crc kubenswrapper[5002]: I0930 12:20:52.072556 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:20:52 crc kubenswrapper[5002]: I0930 12:20:52.072564 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:20:52 crc kubenswrapper[5002]: I0930 12:20:52.072578 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:20:52 crc kubenswrapper[5002]: I0930 12:20:52.072588 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:20:52Z","lastTransitionTime":"2025-09-30T12:20:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in 
/etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:20:52 crc kubenswrapper[5002]: I0930 12:20:52.075349 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97f2c4d225c4e4f2f04337915618c52ced7280fe45dc735fd9b1aca03f95f7f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:52Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:52 crc kubenswrapper[5002]: E0930 12:20:52.092956 5002 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T12:20:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:52Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T12:20:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:52Z\\\",\\\"message\\\":\\\"kubelet has no disk 
pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T12:20:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:52Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T12:20:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:52Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeByt
es\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-a
rt-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"2268e3e5-9437-4733-80ec-6085372d1c27\\\",\\\"systemUUID\\\":\\\"2
730bf9d-d559-45ca-96f1-192133954467\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:52Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:52 crc kubenswrapper[5002]: I0930 12:20:52.096896 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:52Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:52 crc kubenswrapper[5002]: I0930 12:20:52.096959 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:20:52 crc kubenswrapper[5002]: I0930 12:20:52.096990 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:20:52 crc kubenswrapper[5002]: I0930 12:20:52.097001 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:20:52 crc kubenswrapper[5002]: I0930 12:20:52.097017 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:20:52 crc kubenswrapper[5002]: I0930 12:20:52.097029 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:20:52Z","lastTransitionTime":"2025-09-30T12:20:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 12:20:52 crc kubenswrapper[5002]: I0930 12:20:52.108827 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"90d5183f-90e1-421f-bd64-fd7f05605586\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d2e0f074c0f31a5432cea24faa4f95c276f59cdf48bca8ceecd065b4cdff5d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://902cc950dc462d86c9de62cac5d04e99aa3240952f43e8a5f07884e70ff84b56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://047c48f1e175cddc7dc981a8d9027783f4a90c21cb444050771de50fb02e7a9e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f45ac789dc45e907d68fe54ec7bc2ea72dbcc5f46cd55c113c014203d5e709d9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:52Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:52 crc kubenswrapper[5002]: E0930 12:20:52.110591 5002 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T12:20:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:52Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T12:20:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:52Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T12:20:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:52Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T12:20:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:52Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"2268e3e5-9437-4733-80ec-6085372d1c27\\\",\\\"systemUUID\\\":\\\"2730bf9d-d559-45ca-96f1-192133954467\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:52Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:52 crc kubenswrapper[5002]: I0930 12:20:52.119266 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:20:52 crc kubenswrapper[5002]: I0930 12:20:52.119302 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 30 12:20:52 crc kubenswrapper[5002]: I0930 12:20:52.119311 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:20:52 crc kubenswrapper[5002]: I0930 12:20:52.119324 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:20:52 crc kubenswrapper[5002]: I0930 12:20:52.119332 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:20:52Z","lastTransitionTime":"2025-09-30T12:20:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:20:52 crc kubenswrapper[5002]: I0930 12:20:52.125082 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/node-ca-ls6n9"] Sep 30 12:20:52 crc kubenswrapper[5002]: I0930 12:20:52.125499 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/node-ca-ls6n9" Sep 30 12:20:52 crc kubenswrapper[5002]: I0930 12:20:52.128969 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Sep 30 12:20:52 crc kubenswrapper[5002]: I0930 12:20:52.129230 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Sep 30 12:20:52 crc kubenswrapper[5002]: I0930 12:20:52.129385 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates" Sep 30 12:20:52 crc kubenswrapper[5002]: I0930 12:20:52.129532 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt" Sep 30 12:20:52 crc kubenswrapper[5002]: I0930 12:20:52.133091 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://30f55f9a0bcab13420179c62f63cd5e785afe964ac86286450fcd91d7ca0562e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://306dd7840789c5fe2565d819a744a57e330cb1fcc1ab1ca4b3c5ebcfd0361d3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:52Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:52 crc kubenswrapper[5002]: E0930 12:20:52.138752 5002 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status 
\"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T12:20:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:52Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T12:20:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:52Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T12:20:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:52Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T12:20:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:52Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae
669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-r
elease-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-
art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"2268e3e5-9437-4733-80ec-6085372d1c27\\\",\\\"systemUUID\\\":\\\"2730bf9d-d559-45ca-96f1-192133954467\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:52Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:52 crc kubenswrapper[5002]: I0930 12:20:52.143208 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:20:52 crc kubenswrapper[5002]: I0930 12:20:52.143249 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:20:52 crc kubenswrapper[5002]: I0930 12:20:52.143262 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:20:52 crc kubenswrapper[5002]: I0930 12:20:52.143277 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:20:52 crc kubenswrapper[5002]: I0930 12:20:52.143288 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:20:52Z","lastTransitionTime":"2025-09-30T12:20:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 12:20:52 crc kubenswrapper[5002]: I0930 12:20:52.148497 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:52Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:52 crc kubenswrapper[5002]: I0930 12:20:52.155792 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/2ebe21b2-eb85-4b30-b911-78f6619d07f9-serviceca\") pod \"node-ca-ls6n9\" (UID: \"2ebe21b2-eb85-4b30-b911-78f6619d07f9\") " pod="openshift-image-registry/node-ca-ls6n9" Sep 30 12:20:52 crc kubenswrapper[5002]: I0930 12:20:52.155835 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/2ebe21b2-eb85-4b30-b911-78f6619d07f9-host\") pod \"node-ca-ls6n9\" (UID: \"2ebe21b2-eb85-4b30-b911-78f6619d07f9\") " pod="openshift-image-registry/node-ca-ls6n9" Sep 30 12:20:52 crc kubenswrapper[5002]: I0930 12:20:52.155851 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-frsmm\" (UniqueName: \"kubernetes.io/projected/2ebe21b2-eb85-4b30-b911-78f6619d07f9-kube-api-access-frsmm\") pod \"node-ca-ls6n9\" (UID: 
\"2ebe21b2-eb85-4b30-b911-78f6619d07f9\") " pod="openshift-image-registry/node-ca-ls6n9" Sep 30 12:20:52 crc kubenswrapper[5002]: I0930 12:20:52.159994 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-ttfn8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2bd9f18d-bfb3-4bd7-9a87-242029cd3200\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0809a587f4b73c4a5e632e8db2cc0d4a957eded6da9a771cf0b53e4862de54e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lvf2n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"pha
se\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-ttfn8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:52Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:52 crc kubenswrapper[5002]: E0930 12:20:52.160183 5002 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T12:20:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:52Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T12:20:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:52Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T12:20:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:52Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T12:20:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:52Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"2268e3e5-9437-4733-80ec-6085372d1c27\\\",\\\"systemUUID\\\":\\\"2730bf9d-d559-45ca-96f1-192133954467\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:52Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:52 crc kubenswrapper[5002]: I0930 12:20:52.168706 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:20:52 crc kubenswrapper[5002]: I0930 12:20:52.168748 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 30 12:20:52 crc kubenswrapper[5002]: I0930 12:20:52.168758 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:20:52 crc kubenswrapper[5002]: I0930 12:20:52.168774 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:20:52 crc kubenswrapper[5002]: I0930 12:20:52.168787 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:20:52Z","lastTransitionTime":"2025-09-30T12:20:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:20:52 crc kubenswrapper[5002]: I0930 12:20:52.180102 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"341a55c6-78d3-4fa2-8f47-b56fd41fa1c1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7bc40360d8d17f580c7ded1e33762aa443998e4202892e2f2effc08b6659a4a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbwzb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6868404a7219f8dbbc4e7541e5f80402c169808daea81b5f94546472cb8e70a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running
\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbwzb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:50Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-ncbb5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:52Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:52 crc kubenswrapper[5002]: E0930 12:20:52.181615 5002 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T12:20:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:52Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T12:20:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:52Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T12:20:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:52Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T12:20:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:52Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"2268e3e5-9437-4733-80ec-6085372d1c27\\\",\\\"systemUUID\\\":\\\"2730bf9d-d559-45ca-96f1-192133954467\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:52Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:52 crc kubenswrapper[5002]: E0930 12:20:52.181725 5002 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Sep 30 12:20:52 crc kubenswrapper[5002]: I0930 12:20:52.185740 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Sep 30 12:20:52 crc kubenswrapper[5002]: I0930 12:20:52.185801 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:20:52 crc kubenswrapper[5002]: I0930 12:20:52.185820 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:20:52 crc kubenswrapper[5002]: I0930 12:20:52.185836 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:20:52 crc kubenswrapper[5002]: I0930 12:20:52.185845 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:20:52Z","lastTransitionTime":"2025-09-30T12:20:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:20:52 crc kubenswrapper[5002]: I0930 12:20:52.201224 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4pvsr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7095aa7a-d067-4977-bdc5-3a45a52a6a39\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:50Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4pvsr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:52Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:52 crc kubenswrapper[5002]: I0930 12:20:52.213247 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://30f55f9a0bcab13420179c62f63cd5e785afe964ac86286450fcd91d7ca0562e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://306dd7840789c5fe2565d819a744a57e330cb1fcc1ab1ca4b3c5ebcfd0361d3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:52Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:52 crc kubenswrapper[5002]: I0930 12:20:52.225752 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:52Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:52 crc kubenswrapper[5002]: I0930 12:20:52.238315 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-ttfn8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2bd9f18d-bfb3-4bd7-9a87-242029cd3200\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0809a587f4b73c4a5e632e8db2cc0d4a957eded6da9a771cf0b53e4862de54e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lvf2n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-ttfn8\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:52Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:52 crc kubenswrapper[5002]: I0930 12:20:52.247725 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"341a55c6-78d3-4fa2-8f47-b56fd41fa1c1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7bc40360d8d17f580c7ded1e33762aa443998e4202892e2f2effc08b6659a4a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbwzb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6868404a7219f8dbbc4e7541e5f80402c169808daea81b5f94546472cb8e70a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbwzb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:50Z\\\"}}\" for pod 
\"openshift-machine-config-operator\"/\"machine-config-daemon-ncbb5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:52Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:52 crc kubenswrapper[5002]: I0930 12:20:52.256530 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/2ebe21b2-eb85-4b30-b911-78f6619d07f9-serviceca\") pod \"node-ca-ls6n9\" (UID: \"2ebe21b2-eb85-4b30-b911-78f6619d07f9\") " pod="openshift-image-registry/node-ca-ls6n9" Sep 30 12:20:52 crc kubenswrapper[5002]: I0930 12:20:52.256591 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-frsmm\" (UniqueName: \"kubernetes.io/projected/2ebe21b2-eb85-4b30-b911-78f6619d07f9-kube-api-access-frsmm\") pod \"node-ca-ls6n9\" (UID: \"2ebe21b2-eb85-4b30-b911-78f6619d07f9\") " pod="openshift-image-registry/node-ca-ls6n9" Sep 30 12:20:52 crc kubenswrapper[5002]: I0930 12:20:52.256615 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/2ebe21b2-eb85-4b30-b911-78f6619d07f9-host\") pod \"node-ca-ls6n9\" (UID: \"2ebe21b2-eb85-4b30-b911-78f6619d07f9\") " pod="openshift-image-registry/node-ca-ls6n9" Sep 30 12:20:52 crc kubenswrapper[5002]: I0930 12:20:52.256686 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/2ebe21b2-eb85-4b30-b911-78f6619d07f9-host\") pod \"node-ca-ls6n9\" (UID: \"2ebe21b2-eb85-4b30-b911-78f6619d07f9\") " pod="openshift-image-registry/node-ca-ls6n9" Sep 30 12:20:52 crc kubenswrapper[5002]: I0930 12:20:52.257892 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/2ebe21b2-eb85-4b30-b911-78f6619d07f9-serviceca\") pod \"node-ca-ls6n9\" (UID: \"2ebe21b2-eb85-4b30-b911-78f6619d07f9\") " pod="openshift-image-registry/node-ca-ls6n9" Sep 30 12:20:52 crc kubenswrapper[5002]: I0930 12:20:52.264752 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4pvsr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7095aa7a-d067-4977-bdc5-3a45a52a6a39\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller 
ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath
\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin
\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29ad21e0858ff157cc785f60f949fb08082938850870cc2c5198447f1e1d9253\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://29ad21e0858ff157cc785f60f949fb08082938850870cc2c5198447f1e1d9253\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:50Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4pvsr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: 
x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:52Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:52 crc kubenswrapper[5002]: I0930 12:20:52.274170 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-frsmm\" (UniqueName: \"kubernetes.io/projected/2ebe21b2-eb85-4b30-b911-78f6619d07f9-kube-api-access-frsmm\") pod \"node-ca-ls6n9\" (UID: \"2ebe21b2-eb85-4b30-b911-78f6619d07f9\") " pod="openshift-image-registry/node-ca-ls6n9" Sep 30 12:20:52 crc kubenswrapper[5002]: I0930 12:20:52.277976 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:52Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:52 crc kubenswrapper[5002]: I0930 12:20:52.287803 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-p45pn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d0334cb8-cf38-4e50-80e8-2b81d8d46ae7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca035599120eed38b0abc1893076e4d1ecab6b1ce53605f53ff30fa2782b63de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2nvm4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:49Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-p45pn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-09-30T12:20:52Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:52 crc kubenswrapper[5002]: I0930 12:20:52.288388 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:20:52 crc kubenswrapper[5002]: I0930 12:20:52.288420 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:20:52 crc kubenswrapper[5002]: I0930 12:20:52.288442 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:20:52 crc kubenswrapper[5002]: I0930 12:20:52.288459 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:20:52 crc kubenswrapper[5002]: I0930 12:20:52.288516 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:20:52Z","lastTransitionTime":"2025-09-30T12:20:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:20:52 crc kubenswrapper[5002]: I0930 12:20:52.302892 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-2lqq2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7b7df259-4156-484c-bedd-543ca42f2970\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6865c45580d6dc6acaf6dede9483213728b45f602a6df3a6d9264bbec520db66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6865c45580d6dc6acaf6dede9483213728b45f602a6df3a6d9264bbec520db66\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reaso
n\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-2lqq2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:52Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:52 crc 
kubenswrapper[5002]: I0930 12:20:52.321505 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1d4eefd-8292-45a1-af4b-4a4906cccd2f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21a70de7ac636795f52b3bdd2a2b938494cabcf80e1fb9b321a309f4719edf4e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://240fb1d0e11ae7b3459121ba32094ddfa79dc71a1180befebcaf594859a63d86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://91de7b5c3b52d03b05c151b66857b5af5f3b9d228f3f1af32a7f504225e1e739\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\
\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4cb1aca4e313653c2d83c0d4fb8dfab1e6f8c8220c806cbcdcf154f2d9ab6e29\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ef59c1ec617a61effd91da12779f13866a50fe611d078330f0275e8d6ef1c27e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3a814062d9554255b768b26ad452946c64836707ac0e68c9e3f107dc587c0ccb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3a814062d9554255b768b26ad452946c64836707ac0e68c9e3f107dc587c0ccb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://74db2c969d2f9049fd8e8130a9fd50b312f3fce049ab0e814390dbff3b0cb596\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://74db2c969d2f9049fd8e8130a9fd50b312f3fce049ab0e814390dbff3b0cb596\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"reason\\\":\\\"Co
mpleted\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://a367821c9b4b8799fb789ca39d487c519925da11ebb80e94a0123e02cf78964e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a367821c9b4b8799fb789ca39d487c519925da11ebb80e94a0123e02cf78964e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:26Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:52Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:52 crc kubenswrapper[5002]: I0930 12:20:52.336199 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1fe2fccc-790d-41b7-a322-0f99dbd9b3e2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a46e01897629261ad42fc3fa2cc7fdf56a2a020864a1088d9e58edf61aac296e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://017ebbf00a59d33f36e3515dd280bc0c4363da8b3e621339185ebdb3c8728148\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ee0dc371a43f3780e1178e29ac491c7a61786fccb37527dd2f21d800adbbdae\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://49c139b2c0ae558d108d47a8452c2a8348fc164761d468e1b9b98ff2c7407f20\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://49c139b2c0ae558d108d47a8452c2a8348fc164761d468e1b9b98ff2c7407f20\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"message\\\":\\\"le observer\\\\nW0930 12:20:46.369838 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0930 12:20:46.370037 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 12:20:46.371113 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1995022772/tls.crt::/tmp/serving-cert-1995022772/tls.key\\\\\\\"\\\\nI0930 12:20:46.549991 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0930 12:20:46.558658 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0930 12:20:46.558692 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0930 12:20:46.558723 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0930 12:20:46.558735 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0930 12:20:46.572632 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0930 12:20:46.572689 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 12:20:46.572707 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 12:20:46.572718 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0930 12:20:46.572725 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0930 12:20:46.572737 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0930 12:20:46.572744 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0930 12:20:46.573045 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0930 12:20:46.574462 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:40Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b99e73bb34b117cdb12e2475fc8e7d0ffdcb7fc44a000282e7776473eca209ea\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://24b1bc36a80f3925846e76f49c3347f1e2f14555096b3a514b06c6eb2e8fe02c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://24b1bc36a80f3925846e76f49c3347f1e2f14555096b3a514b06c6eb2e8fe02c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:52Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:52 crc kubenswrapper[5002]: I0930 12:20:52.349996 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://28d9568103a5257388af7a54e676246ab6ee732abd4516bd06e2cd7471f0ec08\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:52Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:52 crc kubenswrapper[5002]: I0930 12:20:52.360368 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97f2c4d225c4e4f2f04337915618c52ced7280fe45dc735fd9b1aca03f95f7f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:52Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:52 crc kubenswrapper[5002]: I0930 12:20:52.371531 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:52Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:52 crc kubenswrapper[5002]: I0930 12:20:52.390297 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:20:52 crc kubenswrapper[5002]: I0930 12:20:52.390321 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:20:52 crc kubenswrapper[5002]: I0930 12:20:52.390329 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:20:52 crc kubenswrapper[5002]: I0930 12:20:52.390341 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:20:52 crc kubenswrapper[5002]: I0930 12:20:52.390350 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:20:52Z","lastTransitionTime":"2025-09-30T12:20:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 12:20:52 crc kubenswrapper[5002]: I0930 12:20:52.405081 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"90d5183f-90e1-421f-bd64-fd7f05605586\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d2e0f074c0f31a5432cea24faa4f95c276f59cdf48bca8ceecd065b4cdff5d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://902cc950dc462d86c9de62cac5d04e99aa3240952f43e8a5f07884e70ff84b56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://047c48f1e175cddc7dc981a8d9027783f4a90c21cb444050771de50fb02e7a9e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f45ac789dc45e907d68fe54ec7bc2ea72dbcc5f46cd55c113c014203d5e709d9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:52Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:52 crc kubenswrapper[5002]: I0930 12:20:52.442981 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-ls6n9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2ebe21b2-eb85-4b30-b911-78f6619d07f9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:52Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:52Z\\\",\\\"message\\\":\\\"containers with unready status: 
[node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-frsmm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:52Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-ls6n9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:52Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:52 crc kubenswrapper[5002]: I0930 12:20:52.492989 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:20:52 crc kubenswrapper[5002]: I0930 12:20:52.493200 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:20:52 crc kubenswrapper[5002]: I0930 12:20:52.493208 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:20:52 crc kubenswrapper[5002]: I0930 12:20:52.493221 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:20:52 crc kubenswrapper[5002]: I0930 12:20:52.493229 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:20:52Z","lastTransitionTime":"2025-09-30T12:20:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:20:52 crc kubenswrapper[5002]: I0930 12:20:52.568761 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/node-ca-ls6n9" Sep 30 12:20:52 crc kubenswrapper[5002]: W0930 12:20:52.589966 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2ebe21b2_eb85_4b30_b911_78f6619d07f9.slice/crio-fc0d442accbdd23665778c4bd5e24066b5be28f55ee2ffa76a19d755605ceaca WatchSource:0}: Error finding container fc0d442accbdd23665778c4bd5e24066b5be28f55ee2ffa76a19d755605ceaca: Status 404 returned error can't find the container with id fc0d442accbdd23665778c4bd5e24066b5be28f55ee2ffa76a19d755605ceaca Sep 30 12:20:52 crc kubenswrapper[5002]: I0930 12:20:52.597228 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:20:52 crc kubenswrapper[5002]: I0930 12:20:52.597270 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:20:52 crc kubenswrapper[5002]: I0930 12:20:52.597284 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:20:52 crc kubenswrapper[5002]: I0930 12:20:52.597305 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:20:52 crc kubenswrapper[5002]: I0930 12:20:52.597320 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:20:52Z","lastTransitionTime":"2025-09-30T12:20:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:20:52 crc kubenswrapper[5002]: I0930 12:20:52.675044 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 12:20:52 crc kubenswrapper[5002]: I0930 12:20:52.675115 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 12:20:52 crc kubenswrapper[5002]: I0930 12:20:52.675050 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 12:20:52 crc kubenswrapper[5002]: E0930 12:20:52.675164 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 12:20:52 crc kubenswrapper[5002]: E0930 12:20:52.675253 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 12:20:52 crc kubenswrapper[5002]: E0930 12:20:52.675332 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 12:20:52 crc kubenswrapper[5002]: I0930 12:20:52.701698 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:20:52 crc kubenswrapper[5002]: I0930 12:20:52.701956 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:20:52 crc kubenswrapper[5002]: I0930 12:20:52.701968 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:20:52 crc kubenswrapper[5002]: I0930 12:20:52.701984 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:20:52 crc kubenswrapper[5002]: I0930 12:20:52.701995 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:20:52Z","lastTransitionTime":"2025-09-30T12:20:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:20:52 crc kubenswrapper[5002]: I0930 12:20:52.805044 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:20:52 crc kubenswrapper[5002]: I0930 12:20:52.805086 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:20:52 crc kubenswrapper[5002]: I0930 12:20:52.805099 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:20:52 crc kubenswrapper[5002]: I0930 12:20:52.805113 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:20:52 crc kubenswrapper[5002]: I0930 12:20:52.805122 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:20:52Z","lastTransitionTime":"2025-09-30T12:20:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 12:20:52 crc kubenswrapper[5002]: I0930 12:20:52.907396 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:20:52 crc kubenswrapper[5002]: I0930 12:20:52.907436 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:20:52 crc kubenswrapper[5002]: I0930 12:20:52.907450 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:20:52 crc kubenswrapper[5002]: I0930 12:20:52.907483 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:20:52 crc kubenswrapper[5002]: I0930 12:20:52.907495 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:20:52Z","lastTransitionTime":"2025-09-30T12:20:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:20:52 crc kubenswrapper[5002]: I0930 12:20:52.982701 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4pvsr" event={"ID":"7095aa7a-d067-4977-bdc5-3a45a52a6a39","Type":"ContainerStarted","Data":"8b95267743f525eb5c8712304a1dc7afe4403b2065c0a3b594d95dcba6348170"} Sep 30 12:20:52 crc kubenswrapper[5002]: I0930 12:20:52.982745 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4pvsr" event={"ID":"7095aa7a-d067-4977-bdc5-3a45a52a6a39","Type":"ContainerStarted","Data":"bfbe7b62a4f0a00896ac4608d70c4ef6dc7675e0312f85ca181fcf1087fe73e4"} Sep 30 12:20:52 crc kubenswrapper[5002]: I0930 12:20:52.982758 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4pvsr" event={"ID":"7095aa7a-d067-4977-bdc5-3a45a52a6a39","Type":"ContainerStarted","Data":"a65c3c1af3f1ee07c2f5350011d1f5eb61dfe0b21ff13d7326373d65ffe603c9"} Sep 30 12:20:52 crc kubenswrapper[5002]: I0930 12:20:52.984299 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-ls6n9" event={"ID":"2ebe21b2-eb85-4b30-b911-78f6619d07f9","Type":"ContainerStarted","Data":"2e83e79da4b7b8f89a61048c7583c9ff7ccb7ac910744dfbb572c189c5676741"} Sep 30 12:20:52 crc kubenswrapper[5002]: I0930 12:20:52.984350 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-ls6n9" event={"ID":"2ebe21b2-eb85-4b30-b911-78f6619d07f9","Type":"ContainerStarted","Data":"fc0d442accbdd23665778c4bd5e24066b5be28f55ee2ffa76a19d755605ceaca"} Sep 30 12:20:52 crc kubenswrapper[5002]: I0930 12:20:52.985989 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-2lqq2" event={"ID":"7b7df259-4156-484c-bedd-543ca42f2970","Type":"ContainerStarted","Data":"042d2758098735539d73ee03bcaf5bfaaa765ef2337975341c63511bff33e4ec"} Sep 30 12:20:52 crc kubenswrapper[5002]: I0930 12:20:52.998678 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:52Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:53 crc kubenswrapper[5002]: I0930 12:20:53.010112 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:20:53 crc kubenswrapper[5002]: I0930 12:20:53.010160 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:20:53 crc kubenswrapper[5002]: I0930 12:20:53.010174 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:20:53 crc kubenswrapper[5002]: I0930 12:20:53.010191 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:20:53 crc kubenswrapper[5002]: I0930 12:20:53.010203 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:20:53Z","lastTransitionTime":"2025-09-30T12:20:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 12:20:53 crc kubenswrapper[5002]: I0930 12:20:53.012321 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-p45pn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d0334cb8-cf38-4e50-80e8-2b81d8d46ae7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca035599120eed38b0abc1893076e4d1ecab6b1ce53605f53ff30fa2782b63de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2nvm4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:49Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-p45pn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:53Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:53 crc kubenswrapper[5002]: I0930 12:20:53.025686 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-2lqq2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7b7df259-4156-484c-bedd-543ca42f2970\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"message\\\":\\\"containers with incomplete status: 
[cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6865c45580d6dc6acaf6dede9483213728b45f602a6df3a6d9264bbec520db66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6865c45580d6dc6acaf6dede9483213728b45f602a6df3a6d9264bbec520db66\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"
tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"
readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-2lqq2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:53Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:53 crc kubenswrapper[5002]: I0930 12:20:53.055919 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1d4eefd-8292-45a1-af4b-4a4906cccd2f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21a70de7ac636795f52b3bdd2a2b938494cabcf80e1fb9b321a309f4719edf4e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://240fb1d0e11ae7b3459121ba32094ddfa79dc71a1180befebcaf594859a63d86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://91de7b5c3b52d03b05c151b66857b5af5f3
b9d228f3f1af32a7f504225e1e739\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4cb1aca4e313653c2d83c0d4fb8dfab1e6f8c8220c806cbcdcf154f2d9ab6e29\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ef59c1ec617a61effd91da12779f13866a50fe611d078330f0275e8d6ef1c27e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3a814062d9554255b768b26ad452946c64836707ac0e68c9e3f107dc587c0ccb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3a814062d9554255b768b26ad452946c64836707ac0e68c9e3f107dc587c0ccb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://74db2c969d2f9049fd8e8130a9fd50b312f3fce049ab0e814390dbff3b0cb596\\\",\\\"image\\\":\\\"quay.
io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://74db2c969d2f9049fd8e8130a9fd50b312f3fce049ab0e814390dbff3b0cb596\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://a367821c9b4b8799fb789ca39d487c519925da11ebb80e94a0123e02cf78964e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a367821c9b4b8799fb789ca39d487c519925da11ebb80e94a0123e02cf78964e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:26Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:53Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:53 crc kubenswrapper[5002]: I0930 12:20:53.076152 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1fe2fccc-790d-41b7-a322-0f99dbd9b3e2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a46e01897629261ad42fc3fa2cc7fdf56a2a020864a1088d9e58edf61aac296e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://017ebbf00a59d33f36e3515dd280bc0c4363da8b3e621339185ebdb3c8728148\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ee0dc371a43f3780e1178e29ac491c7a61786fccb37527dd2f21d800adbbdae\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://49c139b2c0ae558d108d47a8452c2a8348fc164761d468e1b9b98ff2c7407f20\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://49c139b2c0ae558d108d47a8452c2a8348fc164761d468e1b9b98ff2c7407f20\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"message\\\":\\\"le observer\\\\nW0930 12:20:46.369838 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0930 12:20:46.370037 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 12:20:46.371113 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1995022772/tls.crt::/tmp/serving-cert-1995022772/tls.key\\\\\\\"\\\\nI0930 12:20:46.549991 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0930 12:20:46.558658 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0930 12:20:46.558692 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0930 12:20:46.558723 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0930 12:20:46.558735 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0930 12:20:46.572632 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0930 12:20:46.572689 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 12:20:46.572707 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 12:20:46.572718 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0930 12:20:46.572725 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0930 12:20:46.572737 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0930 12:20:46.572744 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0930 12:20:46.573045 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0930 12:20:46.574462 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:40Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b99e73bb34b117cdb12e2475fc8e7d0ffdcb7fc44a000282e7776473eca209ea\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://24b1bc36a80f3925846e76f49c3347f1e2f14555096b3a514b06c6eb2e8fe02c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://24b1bc36a80f3925846e76f49c3347f1e2f14555096b3a514b06c6eb2e8fe02c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:53Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:53 crc kubenswrapper[5002]: I0930 12:20:53.091042 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://28d9568103a5257388af7a54e676246ab6ee732abd4516bd06e2cd7471f0ec08\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:53Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:53 crc kubenswrapper[5002]: I0930 12:20:53.102009 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97f2c4d225c4e4f2f04337915618c52ced7280fe45dc735fd9b1aca03f95f7f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:53Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:53 crc kubenswrapper[5002]: I0930 12:20:53.112515 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:53Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:53 crc kubenswrapper[5002]: I0930 12:20:53.112743 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:20:53 crc kubenswrapper[5002]: I0930 12:20:53.112990 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:20:53 crc kubenswrapper[5002]: I0930 12:20:53.113022 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:20:53 crc kubenswrapper[5002]: I0930 12:20:53.113038 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:20:53 crc kubenswrapper[5002]: I0930 12:20:53.113059 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:20:53Z","lastTransitionTime":"2025-09-30T12:20:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 12:20:53 crc kubenswrapper[5002]: I0930 12:20:53.130382 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"90d5183f-90e1-421f-bd64-fd7f05605586\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d2e0f074c0f31a5432cea24faa4f95c276f59cdf48bca8ceecd065b4cdff5d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://902cc950dc462d86c9de62cac5d04e99aa3240952f43e8a5f07884e70ff84b56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://047c48f1e175cddc7dc981a8d9027783f4a90c21cb444050771de50fb02e7a9e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f45ac789dc45e907d68fe54ec7bc2ea72dbcc5f46cd55c113c014203d5e709d9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:53Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:53 crc kubenswrapper[5002]: I0930 12:20:53.140607 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-ls6n9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2ebe21b2-eb85-4b30-b911-78f6619d07f9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e83e79da4b7b8f89a61048c7583c9ff7ccb7ac910744dfbb572c189c5676741\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-frsmm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:52Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-ls6n9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:53Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:53 crc kubenswrapper[5002]: I0930 12:20:53.180277 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://30f55f9a0bcab13420179c62f63cd5e785afe964ac86286450fcd91d7ca0562e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://306dd7840789c5fe2565d819a744a57e330cb1fcc1ab1ca4b3c5ebcfd0361d3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:53Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:53 crc kubenswrapper[5002]: I0930 12:20:53.202133 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:53Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:53 crc kubenswrapper[5002]: I0930 12:20:53.215308 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:20:53 crc kubenswrapper[5002]: I0930 12:20:53.215357 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:20:53 crc kubenswrapper[5002]: I0930 12:20:53.215370 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:20:53 crc kubenswrapper[5002]: I0930 12:20:53.215384 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:20:53 crc kubenswrapper[5002]: I0930 12:20:53.215396 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:20:53Z","lastTransitionTime":"2025-09-30T12:20:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 12:20:53 crc kubenswrapper[5002]: I0930 12:20:53.226161 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-ttfn8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2bd9f18d-bfb3-4bd7-9a87-242029cd3200\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0809a587f4b73c4a5e632e8db2cc0d4a957eded6da9a771cf0b53e4862de54e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lvf2n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-ttfn8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:53Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:53 crc kubenswrapper[5002]: I0930 12:20:53.237865 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"341a55c6-78d3-4fa2-8f47-b56fd41fa1c1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7bc40360d8d17f580c7ded1e33762aa443998e4202892e2f2effc08b6659a4a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbwzb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6868404a7219f8dbbc4e7541e5f80402c169808daea81b5f94546472cb8e70a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbwzb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":
\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:50Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-ncbb5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:53Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:53 crc kubenswrapper[5002]: I0930 12:20:53.252410 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4pvsr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7095aa7a-d067-4977-bdc5-3a45a52a6a39\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29ad21e0858ff157cc785f60f949fb08082938850870cc2c5198447f1e1d9253\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://29ad21e0858ff157cc785f60f949fb08082938850870cc2c5198447f1e1d9253\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:50Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4pvsr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:53Z 
is after 2025-08-24T17:21:41Z" Sep 30 12:20:53 crc kubenswrapper[5002]: I0930 12:20:53.271765 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:53Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:53 crc kubenswrapper[5002]: I0930 12:20:53.282521 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-p45pn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d0334cb8-cf38-4e50-80e8-2b81d8d46ae7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca035599120eed38b0abc1893076e4d1ecab6b1ce53605f53ff30fa2782b63de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2nvm4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:49Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-p45pn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:53Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:53 crc kubenswrapper[5002]: I0930 12:20:53.297349 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-2lqq2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7b7df259-4156-484c-bedd-543ca42f2970\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy 
whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6865c45580d6dc6acaf6dede9483213728b45f602a6df3a6d9264bbec520db66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6865c45580d6dc6acaf6dede9483213728b45f602a6df3a6d9264bbec520db66\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://042d2758098735539d73ee03bcaf5bfaaa765ef2337975341c63511bff33e4ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\
\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\
\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-2lqq2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:53Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:53 crc kubenswrapper[5002]: I0930 12:20:53.309560 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1fe2fccc-790d-41b7-a322-0f99dbd9b3e2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a46e01897629261ad42fc3fa2cc7fdf56a2a020864a1088d9e58edf61aac296e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://017ebbf00a59d33f36e3515dd280bc0c4363da8b3e621339185ebdb3c8728148\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ee0dc371a43f3780e1178e29ac491c7a61786fccb37527dd2f21d800adbbdae\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://49c139b2c0ae558d108d47a8452c2a8348fc164761d468e1b9b98ff2c7407f20\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://49c139b2c0ae558d108d47a8452c2a8348fc164761d468e1b9b98ff2c7407f20\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"message\\\":\\\"le observer\\\\nW0930 12:20:46.369838 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0930 12:20:46.370037 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 12:20:46.371113 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1995022772/tls.crt::/tmp/serving-cert-1995022772/tls.key\\\\\\\"\\\\nI0930 12:20:46.549991 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0930 12:20:46.558658 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0930 12:20:46.558692 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0930 12:20:46.558723 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0930 12:20:46.558735 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0930 12:20:46.572632 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0930 12:20:46.572689 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 12:20:46.572707 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 12:20:46.572718 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0930 12:20:46.572725 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0930 12:20:46.572737 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0930 12:20:46.572744 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0930 12:20:46.573045 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0930 12:20:46.574462 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:40Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b99e73bb34b117cdb12e2475fc8e7d0ffdcb7fc44a000282e7776473eca209ea\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://24b1bc36a80f3925846e76f49c3347f1e2f14555096b3a514b06c6eb2e8fe02c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://24b1bc36a80f3925846e76f49c3347f1e2f14555096b3a514b06c6eb2e8fe02c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:53Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:53 crc kubenswrapper[5002]: I0930 12:20:53.318544 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:20:53 crc kubenswrapper[5002]: I0930 12:20:53.318585 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:20:53 crc kubenswrapper[5002]: I0930 12:20:53.318593 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:20:53 crc kubenswrapper[5002]: I0930 12:20:53.318608 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:20:53 crc kubenswrapper[5002]: I0930 12:20:53.318618 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:20:53Z","lastTransitionTime":"2025-09-30T12:20:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:20:53 crc kubenswrapper[5002]: I0930 12:20:53.322388 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://28d9568103a5257388af7a54e676246ab6ee732abd4516bd06e2cd7471f0ec08\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:53Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:53 crc kubenswrapper[5002]: I0930 12:20:53.331772 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97f2c4d225c4e4f2f04337915618c52ced7280fe45dc735fd9b1aca03f95f7f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:53Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:53 crc kubenswrapper[5002]: I0930 12:20:53.343497 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:53Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:53 crc kubenswrapper[5002]: I0930 12:20:53.373920 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1d4eefd-8292-45a1-af4b-4a4906cccd2f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21a70de7ac636795f52b3bdd2a2b938494cabcf80e1fb9b321a309f4719edf4e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://240fb1d0e11ae7b3459121ba32094ddfa79dc71a1180befebcaf594859a63d86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"r
estartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://91de7b5c3b52d03b05c151b66857b5af5f3b9d228f3f1af32a7f504225e1e739\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4cb1aca4e313653c2d83c0d4fb8dfab1e6f8c8220c806cbcdcf154f2d9ab6e29\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ef59c1ec617a61effd91da12779f13866a50fe611d078330f0275e8d6ef1c27e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3a814062d9554255b768b26ad452946c64836707ac0e68c9e3f107dc587c0ccb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\
\\":{\\\"containerID\\\":\\\"cri-o://3a814062d9554255b768b26ad452946c64836707ac0e68c9e3f107dc587c0ccb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://74db2c969d2f9049fd8e8130a9fd50b312f3fce049ab0e814390dbff3b0cb596\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://74db2c969d2f9049fd8e8130a9fd50b312f3fce049ab0e814390dbff3b0cb596\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://a367821c9b4b8799fb789ca39d487c519925da11ebb80e94a0123e02cf78964e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a367821c9b4b8799fb789ca39d487c519925da11ebb80e94a0123e02cf78964e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:26Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:53Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:53 crc kubenswrapper[5002]: I0930 12:20:53.404174 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-ls6n9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2ebe21b2-eb85-4b30-b911-78f6619d07f9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e83e79da4b7b8f89a61048c7583c9ff7ccb7ac910744dfbb572c189c5676741\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-frsmm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:52Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-ls6n9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:53Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:53 crc kubenswrapper[5002]: I0930 12:20:53.421234 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:20:53 crc kubenswrapper[5002]: I0930 12:20:53.421272 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:20:53 crc kubenswrapper[5002]: I0930 12:20:53.421282 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:20:53 crc kubenswrapper[5002]: I0930 12:20:53.421296 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:20:53 crc kubenswrapper[5002]: I0930 12:20:53.421306 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:20:53Z","lastTransitionTime":"2025-09-30T12:20:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:20:53 crc kubenswrapper[5002]: I0930 12:20:53.446850 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"90d5183f-90e1-421f-bd64-fd7f05605586\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d2e0f074c0f31a5432cea24faa4f95c276f59cdf48bca8ceecd065b4cdff5d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://902cc950dc462d86c9de62cac5d04e99aa3240952f43e8a5f07884e70ff84b56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://047c48f1e175cddc7dc981a8d9027783f4a90c21cb444050771de50fb02e7a9e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:28
Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f45ac789dc45e907d68fe54ec7bc2ea72dbcc5f46cd55c113c014203d5e709d9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:53Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:53 crc kubenswrapper[5002]: I0930 12:20:53.486540 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:53Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:53 crc kubenswrapper[5002]: I0930 12:20:53.523577 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:20:53 crc kubenswrapper[5002]: I0930 12:20:53.523617 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:20:53 crc kubenswrapper[5002]: I0930 12:20:53.523627 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:20:53 crc kubenswrapper[5002]: I0930 12:20:53.523643 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:20:53 crc kubenswrapper[5002]: I0930 12:20:53.523652 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:20:53Z","lastTransitionTime":"2025-09-30T12:20:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 12:20:53 crc kubenswrapper[5002]: I0930 12:20:53.527289 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-ttfn8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2bd9f18d-bfb3-4bd7-9a87-242029cd3200\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0809a587f4b73c4a5e632e8db2cc0d4a957eded6da9a771cf0b53e4862de54e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lvf2n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-ttfn8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:53Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:53 crc kubenswrapper[5002]: I0930 12:20:53.572397 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"341a55c6-78d3-4fa2-8f47-b56fd41fa1c1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7bc40360d8d17f580c7ded1e33762aa443998e4202892e2f2effc08b6659a4a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbwzb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6868404a7219f8dbbc4e7541e5f80402c169808daea81b5f94546472cb8e70a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbwzb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":
\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:50Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-ncbb5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:53Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:53 crc kubenswrapper[5002]: I0930 12:20:53.613082 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4pvsr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7095aa7a-d067-4977-bdc5-3a45a52a6a39\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29ad21e0858ff157cc785f60f949fb08082938850870cc2c5198447f1e1d9253\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://29ad21e0858ff157cc785f60f949fb08082938850870cc2c5198447f1e1d9253\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:50Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4pvsr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:53Z 
is after 2025-08-24T17:21:41Z" Sep 30 12:20:53 crc kubenswrapper[5002]: I0930 12:20:53.626271 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:20:53 crc kubenswrapper[5002]: I0930 12:20:53.626319 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:20:53 crc kubenswrapper[5002]: I0930 12:20:53.626334 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:20:53 crc kubenswrapper[5002]: I0930 12:20:53.626354 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:20:53 crc kubenswrapper[5002]: I0930 12:20:53.626370 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:20:53Z","lastTransitionTime":"2025-09-30T12:20:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:20:53 crc kubenswrapper[5002]: I0930 12:20:53.652885 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://30f55f9a0bcab13420179c62f63cd5e785afe964ac86286450fcd91d7ca0562e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://306dd7840789c5fe2565d819a744a57e330cb1fcc1ab1ca4b3c5ebcfd0361d3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"
started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:53Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:53 crc kubenswrapper[5002]: I0930 12:20:53.729662 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:20:53 crc kubenswrapper[5002]: I0930 12:20:53.729708 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:20:53 crc kubenswrapper[5002]: I0930 12:20:53.729718 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:20:53 crc kubenswrapper[5002]: I0930 12:20:53.729738 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:20:53 crc kubenswrapper[5002]: I0930 12:20:53.729750 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:20:53Z","lastTransitionTime":"2025-09-30T12:20:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:20:53 crc kubenswrapper[5002]: I0930 12:20:53.832282 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:20:53 crc kubenswrapper[5002]: I0930 12:20:53.832330 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:20:53 crc kubenswrapper[5002]: I0930 12:20:53.832349 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:20:53 crc kubenswrapper[5002]: I0930 12:20:53.832371 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:20:53 crc kubenswrapper[5002]: I0930 12:20:53.832386 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:20:53Z","lastTransitionTime":"2025-09-30T12:20:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 12:20:53 crc kubenswrapper[5002]: I0930 12:20:53.935063 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:20:53 crc kubenswrapper[5002]: I0930 12:20:53.935104 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:20:53 crc kubenswrapper[5002]: I0930 12:20:53.935113 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:20:53 crc kubenswrapper[5002]: I0930 12:20:53.935127 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:20:53 crc kubenswrapper[5002]: I0930 12:20:53.935136 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:20:53Z","lastTransitionTime":"2025-09-30T12:20:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:20:53 crc kubenswrapper[5002]: I0930 12:20:53.993707 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4pvsr" event={"ID":"7095aa7a-d067-4977-bdc5-3a45a52a6a39","Type":"ContainerStarted","Data":"d41b04f928f3b4423bf4aa1babfd6292b749752d31ac57771b36bed503312d3f"} Sep 30 12:20:53 crc kubenswrapper[5002]: I0930 12:20:53.993759 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4pvsr" event={"ID":"7095aa7a-d067-4977-bdc5-3a45a52a6a39","Type":"ContainerStarted","Data":"ceb946c94ca247c05c57001754a73122b1bb98a98d886df9d64b2e4a003a4cf6"} Sep 30 12:20:53 crc kubenswrapper[5002]: I0930 12:20:53.993779 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4pvsr" event={"ID":"7095aa7a-d067-4977-bdc5-3a45a52a6a39","Type":"ContainerStarted","Data":"89dbc048c1483376e11a6754384e9ea154767a88f7dfb3463809a2554bb142b9"} Sep 30 12:20:53 crc kubenswrapper[5002]: I0930 12:20:53.996755 5002 generic.go:334] "Generic (PLEG): container finished" podID="7b7df259-4156-484c-bedd-543ca42f2970" containerID="042d2758098735539d73ee03bcaf5bfaaa765ef2337975341c63511bff33e4ec" exitCode=0 Sep 30 12:20:53 crc kubenswrapper[5002]: I0930 12:20:53.996793 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-2lqq2" event={"ID":"7b7df259-4156-484c-bedd-543ca42f2970","Type":"ContainerDied","Data":"042d2758098735539d73ee03bcaf5bfaaa765ef2337975341c63511bff33e4ec"} Sep 30 12:20:54 crc kubenswrapper[5002]: I0930 12:20:54.024967 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1d4eefd-8292-45a1-af4b-4a4906cccd2f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21a70de7ac636795f52b3bdd2a2b938494cabcf80e1fb9b321a309f4719edf4e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://240fb1d0e11ae7b3459121ba32094ddfa79dc71a1180befebcaf594859a63d86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://91de7b5c3b52d03b05c151b66857b5af5f3b9d228f3f1af32a7f504225e1e739\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4cb1aca4e313653c2d83c0d4fb8dfab1e6f8c82
20c806cbcdcf154f2d9ab6e29\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ef59c1ec617a61effd91da12779f13866a50fe611d078330f0275e8d6ef1c27e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3a814062d9554255b768b26ad452946c64836707ac0e68c9e3f107dc587c0ccb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3a814062d9554255b768b26ad452946c64836707ac0e68c9e3f107dc587c0ccb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://74db2c969d2f9049fd8e8130a9fd50b312f3fce049ab0e814390dbff3b0cb596\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://74db2c969d2f9049fd8e8130a9fd50b312f3fce049ab0e814390dbff3b0cb596\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://a367821c9b4b8799fb789ca39d487c519925da11ebb80e94a0123e02cf78964e\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a367821c9b4b8799fb789ca39d487c519925da11ebb80e94a0123e02cf78964e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:26Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:54Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:54 crc kubenswrapper[5002]: I0930 12:20:54.042963 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:20:54 crc kubenswrapper[5002]: I0930 12:20:54.043045 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:20:54 crc kubenswrapper[5002]: I0930 12:20:54.043072 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:20:54 crc kubenswrapper[5002]: I0930 12:20:54.043106 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:20:54 crc kubenswrapper[5002]: I0930 12:20:54.043132 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:20:54Z","lastTransitionTime":"2025-09-30T12:20:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 12:20:54 crc kubenswrapper[5002]: I0930 12:20:54.047629 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1fe2fccc-790d-41b7-a322-0f99dbd9b3e2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a46e01897629261ad42fc3fa2cc7fdf56a2a020864a1088d9e58edf61aac296e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://017ebbf00a59d33f36e3515dd280bc0c4363da8b3e621339185ebdb3c8728148\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ee0dc371a43f3780e1178e29ac491c7a61786fccb37527dd2f21d800adbbdae\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartC
ount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://49c139b2c0ae558d108d47a8452c2a8348fc164761d468e1b9b98ff2c7407f20\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://49c139b2c0ae558d108d47a8452c2a8348fc164761d468e1b9b98ff2c7407f20\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"message\\\":\\\"le observer\\\\nW0930 12:20:46.369838 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0930 12:20:46.370037 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 12:20:46.371113 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1995022772/tls.crt::/tmp/serving-cert-1995022772/tls.key\\\\\\\"\\\\nI0930 12:20:46.549991 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0930 12:20:46.558658 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0930 12:20:46.558692 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0930 12:20:46.558723 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0930 12:20:46.558735 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0930 12:20:46.572632 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0930 12:20:46.572689 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 12:20:46.572707 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 12:20:46.572718 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0930 12:20:46.572725 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0930 12:20:46.572737 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0930 12:20:46.572744 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0930 12:20:46.573045 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0930 12:20:46.574462 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:40Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b99e73bb34b117cdb12e2475fc8e7d0ffdcb7fc44a000282e7776473eca209ea\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://24b1bc36a80f3925846e76f49c3347f1e2f14555096b3a514b06c6eb2e8fe02c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://24b1bc36a80f3925846e76f49c3347f1e2f14555096b3a514b06c6eb2e8fe02c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:54Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:54 crc kubenswrapper[5002]: I0930 12:20:54.067241 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://28d9568103a5257388af7a54e676246ab6ee732abd4516bd06e2cd7471f0ec08\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:54Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:54 crc kubenswrapper[5002]: I0930 12:20:54.084827 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97f2c4d225c4e4f2f04337915618c52ced7280fe45dc735fd9b1aca03f95f7f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:54Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:54 crc kubenswrapper[5002]: I0930 12:20:54.097868 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:54Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:54 crc kubenswrapper[5002]: I0930 12:20:54.111221 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"90d5183f-90e1-421f-bd64-fd7f05605586\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d2e0f074c0f31a5432cea24faa4f95c276f59cdf48bca8ceecd065b4cdff5d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://902cc950dc462d86c9de62cac5d04e99aa3240952f43e8a5f07884e70ff84b56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"
2025-09-30T12:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://047c48f1e175cddc7dc981a8d9027783f4a90c21cb444050771de50fb02e7a9e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f45ac789dc45e907d68fe54ec7bc2ea72dbcc5f46cd55c113c014203d5e709d9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:54Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:54 crc kubenswrapper[5002]: I0930 12:20:54.124138 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-ls6n9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2ebe21b2-eb85-4b30-b911-78f6619d07f9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e83e79da4b7b8f89a61048c7583c9ff7ccb7ac910744dfbb572c189c5676741\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-frsmm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:52Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-ls6n9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:54Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:54 crc kubenswrapper[5002]: I0930 12:20:54.142090 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://30f55f9a0bcab13420179c62f63cd5e785afe964ac86286450fcd91d7ca0562e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://306dd7840789c5fe2565d819a744a57e330cb1fcc1ab1ca4b3c5ebcfd0361d3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:54Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:54 crc kubenswrapper[5002]: I0930 12:20:54.146534 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:20:54 crc kubenswrapper[5002]: I0930 12:20:54.146609 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:20:54 crc kubenswrapper[5002]: I0930 12:20:54.146627 5002 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Sep 30 12:20:54 crc kubenswrapper[5002]: I0930 12:20:54.146651 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:20:54 crc kubenswrapper[5002]: I0930 12:20:54.146666 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:20:54Z","lastTransitionTime":"2025-09-30T12:20:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:20:54 crc kubenswrapper[5002]: I0930 12:20:54.158890 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:54Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:54 crc kubenswrapper[5002]: I0930 12:20:54.173013 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-ttfn8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2bd9f18d-bfb3-4bd7-9a87-242029cd3200\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0809a587f4b73c4a5e632e8db2cc0d4a957eded6da9a771cf0b53e4862de54e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mo
untPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lvf2n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-ttfn8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:54Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:54 crc kubenswrapper[5002]: I0930 12:20:54.184496 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"341a55c6-78d3-4fa2-8f47-b56fd41fa1c1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7bc40360d8d17f580c7ded1e33762aa443998e4202892e2f2effc08b6659a4a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbwzb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-
o://6868404a7219f8dbbc4e7541e5f80402c169808daea81b5f94546472cb8e70a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbwzb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:50Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-ncbb5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:54Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:54 crc kubenswrapper[5002]: I0930 12:20:54.203057 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4pvsr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7095aa7a-d067-4977-bdc5-3a45a52a6a39\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29ad21e0858ff157cc785f60f949fb08082938850870cc2c5198447f1e1d9253\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://29ad21e0858ff157cc785f60f949fb08082938850870cc2c5198447f1e1d9253\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:50Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4pvsr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:54Z 
is after 2025-08-24T17:21:41Z" Sep 30 12:20:54 crc kubenswrapper[5002]: I0930 12:20:54.216916 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:54Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:54 crc kubenswrapper[5002]: I0930 12:20:54.229032 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-p45pn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d0334cb8-cf38-4e50-80e8-2b81d8d46ae7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca035599120eed38b0abc1893076e4d1ecab6b1ce53605f53ff30fa2782b63de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2nvm4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:49Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-p45pn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:54Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:54 crc kubenswrapper[5002]: I0930 12:20:54.249351 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:20:54 crc kubenswrapper[5002]: I0930 12:20:54.249421 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:20:54 crc kubenswrapper[5002]: I0930 12:20:54.249434 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:20:54 crc kubenswrapper[5002]: I0930 12:20:54.249452 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:20:54 crc kubenswrapper[5002]: I0930 12:20:54.249483 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:20:54Z","lastTransitionTime":"2025-09-30T12:20:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:20:54 crc kubenswrapper[5002]: I0930 12:20:54.249731 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-2lqq2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7b7df259-4156-484c-bedd-543ca42f2970\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"message\\\":\\\"containers with incomplete status: [bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6865c45580d6dc6acaf6dede9483213728b45f602a6df3a6d9264bbec520db66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6865c45580d6dc6acaf6dede9483213728b45f602a6df3a6d9264bbec520db66\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled
\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://042d2758098735539d73ee03bcaf5bfaaa765ef2337975341c63511bff33e4ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://042d2758098735539d73ee03bcaf5bfaaa765ef2337975341c63511bff33e4ec\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"la
stState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-2lqq2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:54Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:54 crc kubenswrapper[5002]: I0930 12:20:54.355905 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:20:54 crc kubenswrapper[5002]: I0930 12:20:54.355950 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:20:54 crc kubenswrapper[5002]: I0930 12:20:54.355964 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:20:54 crc kubenswrapper[5002]: I0930 12:20:54.355981 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:20:54 crc kubenswrapper[5002]: I0930 12:20:54.355993 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:20:54Z","lastTransitionTime":"2025-09-30T12:20:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 12:20:54 crc kubenswrapper[5002]: I0930 12:20:54.459627 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:20:54 crc kubenswrapper[5002]: I0930 12:20:54.459696 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:20:54 crc kubenswrapper[5002]: I0930 12:20:54.459722 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:20:54 crc kubenswrapper[5002]: I0930 12:20:54.459751 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:20:54 crc kubenswrapper[5002]: I0930 12:20:54.459789 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:20:54Z","lastTransitionTime":"2025-09-30T12:20:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:20:54 crc kubenswrapper[5002]: I0930 12:20:54.561777 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:20:54 crc kubenswrapper[5002]: I0930 12:20:54.561824 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:20:54 crc kubenswrapper[5002]: I0930 12:20:54.561837 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:20:54 crc kubenswrapper[5002]: I0930 12:20:54.561856 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:20:54 crc kubenswrapper[5002]: I0930 12:20:54.561868 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:20:54Z","lastTransitionTime":"2025-09-30T12:20:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 12:20:54 crc kubenswrapper[5002]: I0930 12:20:54.580139 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 12:20:54 crc kubenswrapper[5002]: I0930 12:20:54.580228 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 12:20:54 crc kubenswrapper[5002]: I0930 12:20:54.580260 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 12:20:54 crc kubenswrapper[5002]: I0930 12:20:54.580276 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 12:20:54 crc kubenswrapper[5002]: I0930 12:20:54.580300 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 12:20:54 crc kubenswrapper[5002]: E0930 12:20:54.580399 5002 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Sep 30 12:20:54 crc kubenswrapper[5002]: E0930 12:20:54.580415 5002 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Sep 30 12:20:54 crc kubenswrapper[5002]: E0930 12:20:54.580426 5002 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 30 12:20:54 crc kubenswrapper[5002]: E0930 12:20:54.580502 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 12:21:02.58042309 +0000 UTC m=+36.830105276 (durationBeforeRetry 8s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 12:20:54 crc kubenswrapper[5002]: E0930 12:20:54.580562 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-09-30 12:21:02.580541514 +0000 UTC m=+36.830223760 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 30 12:20:54 crc kubenswrapper[5002]: E0930 12:20:54.580678 5002 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Sep 30 12:20:54 crc kubenswrapper[5002]: E0930 12:20:54.580685 5002 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Sep 30 12:20:54 crc kubenswrapper[5002]: E0930 12:20:54.580717 5002 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Sep 30 12:20:54 crc kubenswrapper[5002]: E0930 12:20:54.580739 5002 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Sep 30 12:20:54 crc kubenswrapper[5002]: E0930 12:20:54.580744 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-30 12:21:02.580726729 +0000 UTC m=+36.830408975 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Sep 30 12:20:54 crc kubenswrapper[5002]: E0930 12:20:54.580750 5002 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 30 12:20:54 crc kubenswrapper[5002]: E0930 12:20:54.580767 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-30 12:21:02.580756869 +0000 UTC m=+36.830439155 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Sep 30 12:20:54 crc kubenswrapper[5002]: E0930 12:20:54.580793 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-09-30 12:21:02.58078389 +0000 UTC m=+36.830466146 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 30 12:20:54 crc kubenswrapper[5002]: I0930 12:20:54.664431 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:20:54 crc kubenswrapper[5002]: I0930 12:20:54.664529 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:20:54 crc kubenswrapper[5002]: I0930 12:20:54.664548 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:20:54 crc kubenswrapper[5002]: I0930 12:20:54.664573 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:20:54 crc kubenswrapper[5002]: I0930 12:20:54.664592 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:20:54Z","lastTransitionTime":"2025-09-30T12:20:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:20:54 crc kubenswrapper[5002]: I0930 12:20:54.675879 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 12:20:54 crc kubenswrapper[5002]: I0930 12:20:54.675889 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 12:20:54 crc kubenswrapper[5002]: E0930 12:20:54.675984 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 12:20:54 crc kubenswrapper[5002]: I0930 12:20:54.675880 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 12:20:54 crc kubenswrapper[5002]: E0930 12:20:54.676093 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 12:20:54 crc kubenswrapper[5002]: E0930 12:20:54.676210 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 12:20:54 crc kubenswrapper[5002]: I0930 12:20:54.766424 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:20:54 crc kubenswrapper[5002]: I0930 12:20:54.766529 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:20:54 crc kubenswrapper[5002]: I0930 12:20:54.766546 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:20:54 crc kubenswrapper[5002]: I0930 12:20:54.766564 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:20:54 crc kubenswrapper[5002]: I0930 12:20:54.766576 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:20:54Z","lastTransitionTime":"2025-09-30T12:20:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 12:20:54 crc kubenswrapper[5002]: I0930 12:20:54.868791 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:20:54 crc kubenswrapper[5002]: I0930 12:20:54.868846 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:20:54 crc kubenswrapper[5002]: I0930 12:20:54.868863 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:20:54 crc kubenswrapper[5002]: I0930 12:20:54.868886 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:20:54 crc kubenswrapper[5002]: I0930 12:20:54.868904 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:20:54Z","lastTransitionTime":"2025-09-30T12:20:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:20:54 crc kubenswrapper[5002]: I0930 12:20:54.970961 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:20:54 crc kubenswrapper[5002]: I0930 12:20:54.971014 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:20:54 crc kubenswrapper[5002]: I0930 12:20:54.971031 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:20:54 crc kubenswrapper[5002]: I0930 12:20:54.971052 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:20:54 crc kubenswrapper[5002]: I0930 12:20:54.971066 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:20:54Z","lastTransitionTime":"2025-09-30T12:20:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 12:20:55 crc kubenswrapper[5002]: I0930 12:20:55.003866 5002 generic.go:334] "Generic (PLEG): container finished" podID="7b7df259-4156-484c-bedd-543ca42f2970" containerID="037e6837e485e2662c487cea7b28bd28b85c3aa2dc698edd3d16b4269907dd83" exitCode=0 Sep 30 12:20:55 crc kubenswrapper[5002]: I0930 12:20:55.003924 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-2lqq2" event={"ID":"7b7df259-4156-484c-bedd-543ca42f2970","Type":"ContainerDied","Data":"037e6837e485e2662c487cea7b28bd28b85c3aa2dc698edd3d16b4269907dd83"} Sep 30 12:20:55 crc kubenswrapper[5002]: I0930 12:20:55.031941 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://30f55f9a0bcab13420179c62f63cd5e785afe964ac86286450fcd91d7ca0562e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://306dd7840789c5fe2565d819a744a57e330cb1fcc1ab1ca4b3c5ebcfd0361d3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod 
\"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:55Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:55 crc kubenswrapper[5002]: I0930 12:20:55.049597 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:55Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:55 crc kubenswrapper[5002]: I0930 12:20:55.064736 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-ttfn8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2bd9f18d-bfb3-4bd7-9a87-242029cd3200\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0809a587f4b73c4a5e632e8db2cc0d4a957eded6da9a771cf0b53e4862de54e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mo
untPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lvf2n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-ttfn8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:55Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:55 crc kubenswrapper[5002]: I0930 12:20:55.073097 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:20:55 crc kubenswrapper[5002]: I0930 12:20:55.073152 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:20:55 crc kubenswrapper[5002]: I0930 12:20:55.073170 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:20:55 crc kubenswrapper[5002]: I0930 12:20:55.073192 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:20:55 crc kubenswrapper[5002]: I0930 12:20:55.073209 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:20:55Z","lastTransitionTime":"2025-09-30T12:20:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 12:20:55 crc kubenswrapper[5002]: I0930 12:20:55.081282 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"341a55c6-78d3-4fa2-8f47-b56fd41fa1c1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7bc40360d8d17f580c7ded1e33762aa443998e4202892e2f2effc08b6659a4a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbwzb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6868404a7219f8dbbc4e7541e5f80402c169808daea81b5f94546472cb8e70a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbwzb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:50Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-ncbb5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:55Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:55 crc kubenswrapper[5002]: I0930 12:20:55.104603 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4pvsr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7095aa7a-d067-4977-bdc5-3a45a52a6a39\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af
0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"n
ame\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIP
s\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29ad21e0858ff157cc785f60f949fb08082938850870cc2c5198447f1e1d9253\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://29ad21e0858ff157cc785f60f949fb08082938850870cc2c5198447f1e1d9253\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:50Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4pvsr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:55Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:55 crc kubenswrapper[5002]: I0930 12:20:55.124235 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:55Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:55 crc kubenswrapper[5002]: I0930 12:20:55.136388 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-p45pn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d0334cb8-cf38-4e50-80e8-2b81d8d46ae7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca035599120eed38b0abc1893076e4d1ecab6b1ce53605f53ff30fa2782b63de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2nvm4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:49Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-p45pn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-09-30T12:20:55Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:55 crc kubenswrapper[5002]: I0930 12:20:55.151594 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-2lqq2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7b7df259-4156-484c-bedd-543ca42f2970\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"message\\\":\\\"containers with incomplete status: [routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6865c45580d6dc6acaf6dede9483213728b45f602a6df3a6d9264bbec520db66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6865c45580d6dc6acaf6dede9483213728b45f602a6df3a6d9264bbec520db66\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.i
o/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://042d2758098735539d73ee03bcaf5bfaaa765ef2337975341c63511bff33e4ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://042d2758098735539d73ee03bcaf5bfaaa765ef2337975341c63511bff33e4ec\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://037e6837e485e2662c487cea7b28bd28b85c3aa2dc698edd3d16b4269907dd83\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://037e6837e485e2662c487cea7b28bd28b85c3aa2dc698edd3d16b4269907dd83\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\
"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-2lqq2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:55Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:55 crc kubenswrapper[5002]: I0930 12:20:55.176565 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:20:55 crc kubenswrapper[5002]: I0930 12:20:55.176653 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:20:55 crc kubenswrapper[5002]: I0930 12:20:55.176663 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:20:55 crc kubenswrapper[5002]: I0930 12:20:55.176697 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:20:55 crc kubenswrapper[5002]: I0930 12:20:55.176708 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:20:55Z","lastTransitionTime":"2025-09-30T12:20:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 12:20:55 crc kubenswrapper[5002]: I0930 12:20:55.178925 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1d4eefd-8292-45a1-af4b-4a4906cccd2f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21a70de7ac636795f52b3bdd2a2b938494cabcf80e1fb9b321a309f4719edf4e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://240fb1d0e11ae7b3459121ba32094ddfa79dc71a1180befebcaf594859a63d86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://91de7b5c3b52d03b05c151b66857b5af5f3b9d228f3f1af32a7f504225e1e739\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4cb1aca4e313653c2d83c0d4fb8dfab1e6f8c8220c806cbcdcf154f2d9ab6e29\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ef59c1ec617a61effd91da12779f13866a50fe611d078330f0275e8d6ef1c27e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3a814062d9554255b768b26ad452946c64836707ac0e68c9e3f107dc587c0ccb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3a814062d9554255b768b26ad452946c64836707ac0e68c9e3f107dc587c0ccb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://74db2c969d2f9049fd8e8130a9fd50b312f3fce049ab0e814390dbff3b0cb596\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://74db2c969d2f9049fd8e8130a9fd50b312f3fce049ab0e814390dbff3b0cb596\\\",\\\"exitCode\\\":0,\\\"finished
At\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://a367821c9b4b8799fb789ca39d487c519925da11ebb80e94a0123e02cf78964e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a367821c9b4b8799fb789ca39d487c519925da11ebb80e94a0123e02cf78964e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:26Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:55Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:55 crc kubenswrapper[5002]: I0930 12:20:55.195416 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1fe2fccc-790d-41b7-a322-0f99dbd9b3e2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a46e01897629261ad42fc3fa2cc7fdf56a2a020864a1088d9e58edf61aac296e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://017ebbf00a59d33f36e3515dd280bc0c4363da8b3e621339185ebdb3c8728148\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ee0dc371a43f3780e1178e29ac491c7a61786fccb37527dd2f21d800adbbdae\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://49c139b2c0ae558d108d47a8452c2a8348fc164761d468e1b9b98ff2c7407f20\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://49c139b2c0ae558d108d47a8452c2a8348fc164761d468e1b9b98ff2c7407f20\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"message\\\":\\\"le observer\\\\nW0930 12:20:46.369838 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0930 12:20:46.370037 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 12:20:46.371113 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1995022772/tls.crt::/tmp/serving-cert-1995022772/tls.key\\\\\\\"\\\\nI0930 12:20:46.549991 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0930 12:20:46.558658 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0930 12:20:46.558692 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0930 12:20:46.558723 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0930 12:20:46.558735 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0930 12:20:46.572632 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0930 12:20:46.572689 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 12:20:46.572707 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 12:20:46.572718 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0930 12:20:46.572725 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0930 12:20:46.572737 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0930 12:20:46.572744 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0930 12:20:46.573045 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0930 12:20:46.574462 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:40Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b99e73bb34b117cdb12e2475fc8e7d0ffdcb7fc44a000282e7776473eca209ea\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://24b1bc36a80f3925846e76f49c3347f1e2f14555096b3a514b06c6eb2e8fe02c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://24b1bc36a80f3925846e76f49c3347f1e2f14555096b3a514b06c6eb2e8fe02c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:55Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:55 crc kubenswrapper[5002]: I0930 12:20:55.210395 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://28d9568103a5257388af7a54e676246ab6ee732abd4516bd06e2cd7471f0ec08\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:55Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:55 crc kubenswrapper[5002]: I0930 12:20:55.222149 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97f2c4d225c4e4f2f04337915618c52ced7280fe45dc735fd9b1aca03f95f7f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:55Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:55 crc kubenswrapper[5002]: I0930 12:20:55.233105 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:55Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:55 crc kubenswrapper[5002]: I0930 12:20:55.245291 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"90d5183f-90e1-421f-bd64-fd7f05605586\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d2e0f074c0f31a5432cea24faa4f95c276f59cdf48bca8ceecd065b4cdff5d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://902cc950dc462d86c9de62cac5d04e99aa3240952f43e8a5f07884e70ff84b56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"
2025-09-30T12:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://047c48f1e175cddc7dc981a8d9027783f4a90c21cb444050771de50fb02e7a9e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f45ac789dc45e907d68fe54ec7bc2ea72dbcc5f46cd55c113c014203d5e709d9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:55Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:55 crc kubenswrapper[5002]: I0930 12:20:55.254003 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-ls6n9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2ebe21b2-eb85-4b30-b911-78f6619d07f9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e83e79da4b7b8f89a61048c7583c9ff7ccb7ac910744dfbb572c189c5676741\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-frsmm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:52Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-ls6n9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:55Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:55 crc kubenswrapper[5002]: I0930 12:20:55.279088 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:20:55 crc kubenswrapper[5002]: I0930 12:20:55.279127 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:20:55 crc kubenswrapper[5002]: I0930 12:20:55.279137 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:20:55 crc kubenswrapper[5002]: I0930 12:20:55.279152 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:20:55 crc kubenswrapper[5002]: I0930 12:20:55.279162 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:20:55Z","lastTransitionTime":"2025-09-30T12:20:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:20:55 crc kubenswrapper[5002]: I0930 12:20:55.383164 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:20:55 crc kubenswrapper[5002]: I0930 12:20:55.383202 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:20:55 crc kubenswrapper[5002]: I0930 12:20:55.383211 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:20:55 crc kubenswrapper[5002]: I0930 12:20:55.383224 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:20:55 crc kubenswrapper[5002]: I0930 12:20:55.383233 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:20:55Z","lastTransitionTime":"2025-09-30T12:20:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:20:55 crc kubenswrapper[5002]: I0930 12:20:55.487137 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:20:55 crc kubenswrapper[5002]: I0930 12:20:55.487189 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:20:55 crc kubenswrapper[5002]: I0930 12:20:55.487203 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:20:55 crc kubenswrapper[5002]: I0930 12:20:55.487221 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:20:55 crc kubenswrapper[5002]: I0930 12:20:55.487239 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:20:55Z","lastTransitionTime":"2025-09-30T12:20:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 12:20:55 crc kubenswrapper[5002]: I0930 12:20:55.590499 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:20:55 crc kubenswrapper[5002]: I0930 12:20:55.590540 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:20:55 crc kubenswrapper[5002]: I0930 12:20:55.590548 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:20:55 crc kubenswrapper[5002]: I0930 12:20:55.590563 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:20:55 crc kubenswrapper[5002]: I0930 12:20:55.590582 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:20:55Z","lastTransitionTime":"2025-09-30T12:20:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:20:55 crc kubenswrapper[5002]: I0930 12:20:55.693061 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:20:55 crc kubenswrapper[5002]: I0930 12:20:55.693328 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:20:55 crc kubenswrapper[5002]: I0930 12:20:55.693345 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:20:55 crc kubenswrapper[5002]: I0930 12:20:55.693362 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:20:55 crc kubenswrapper[5002]: I0930 12:20:55.693372 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:20:55Z","lastTransitionTime":"2025-09-30T12:20:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:20:55 crc kubenswrapper[5002]: I0930 12:20:55.796326 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:20:55 crc kubenswrapper[5002]: I0930 12:20:55.796575 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:20:55 crc kubenswrapper[5002]: I0930 12:20:55.796642 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:20:55 crc kubenswrapper[5002]: I0930 12:20:55.796727 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:20:55 crc kubenswrapper[5002]: I0930 12:20:55.796786 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:20:55Z","lastTransitionTime":"2025-09-30T12:20:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 12:20:55 crc kubenswrapper[5002]: I0930 12:20:55.899377 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:20:55 crc kubenswrapper[5002]: I0930 12:20:55.899434 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:20:55 crc kubenswrapper[5002]: I0930 12:20:55.899450 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:20:55 crc kubenswrapper[5002]: I0930 12:20:55.899494 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:20:55 crc kubenswrapper[5002]: I0930 12:20:55.899510 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:20:55Z","lastTransitionTime":"2025-09-30T12:20:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:20:56 crc kubenswrapper[5002]: I0930 12:20:56.001999 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:20:56 crc kubenswrapper[5002]: I0930 12:20:56.002038 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:20:56 crc kubenswrapper[5002]: I0930 12:20:56.002047 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:20:56 crc kubenswrapper[5002]: I0930 12:20:56.002061 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:20:56 crc kubenswrapper[5002]: I0930 12:20:56.002070 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:20:56Z","lastTransitionTime":"2025-09-30T12:20:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 12:20:56 crc kubenswrapper[5002]: I0930 12:20:56.010424 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4pvsr" event={"ID":"7095aa7a-d067-4977-bdc5-3a45a52a6a39","Type":"ContainerStarted","Data":"6ce6227a7ac3ced1fbd4814039859e25ff52f314aa8479275f1bfc49b04cf411"} Sep 30 12:20:56 crc kubenswrapper[5002]: I0930 12:20:56.013064 5002 generic.go:334] "Generic (PLEG): container finished" podID="7b7df259-4156-484c-bedd-543ca42f2970" containerID="2a451f8cf7baf508ad6ceda7f19f3117804e5698e678bedf8e4187f60847aaac" exitCode=0 Sep 30 12:20:56 crc kubenswrapper[5002]: I0930 12:20:56.013103 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-2lqq2" event={"ID":"7b7df259-4156-484c-bedd-543ca42f2970","Type":"ContainerDied","Data":"2a451f8cf7baf508ad6ceda7f19f3117804e5698e678bedf8e4187f60847aaac"} Sep 30 12:20:56 crc kubenswrapper[5002]: I0930 12:20:56.030133 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1fe2fccc-790d-41b7-a322-0f99dbd9b3e2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a46e01897629261ad42fc3fa2cc7fdf56a2a020864a1088d9e58edf61aac296e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://017ebbf00a59d33f36e3515dd280bc0c4363da8b3e621339185ebdb3c8728148\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ee0dc371a43f3780e1178e29ac491c7a61786fccb37527dd2f21d800adbbdae\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://49c139b2c0ae558d108d47a8452c2a8348fc164761d468e1b9b98ff2c7407f20\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://49c139b2c0ae558d108d47a8452c2a8348fc164761d468e1b9b98ff2c7407f20\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"message\\\":\\\"le observer\\\\nW0930 12:20:46.369838 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0930 12:20:46.370037 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 12:20:46.371113 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1995022772/tls.crt::/tmp/serving-cert-1995022772/tls.key\\\\\\\"\\\\nI0930 12:20:46.549991 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0930 12:20:46.558658 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0930 12:20:46.558692 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0930 12:20:46.558723 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0930 12:20:46.558735 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0930 12:20:46.572632 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0930 12:20:46.572689 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 12:20:46.572707 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 12:20:46.572718 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0930 12:20:46.572725 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0930 12:20:46.572737 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0930 12:20:46.572744 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0930 12:20:46.573045 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0930 12:20:46.574462 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:40Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b99e73bb34b117cdb12e2475fc8e7d0ffdcb7fc44a000282e7776473eca209ea\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://24b1bc36a80f3925846e76f49c3347f1e2f14555096b3a514b06c6eb2e8fe02c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://24b1bc36a80f3925846e76f49c3347f1e2f14555096b3a514b06c6eb2e8fe02c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:56Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:56 crc kubenswrapper[5002]: I0930 12:20:56.050657 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://28d9568103a5257388af7a54e676246ab6ee732abd4516bd06e2cd7471f0ec08\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:56Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:56 crc kubenswrapper[5002]: I0930 12:20:56.062761 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97f2c4d225c4e4f2f04337915618c52ced7280fe45dc735fd9b1aca03f95f7f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:56Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:56 crc kubenswrapper[5002]: I0930 12:20:56.074156 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:56Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:56 crc kubenswrapper[5002]: I0930 12:20:56.095057 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1d4eefd-8292-45a1-af4b-4a4906cccd2f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21a70de7ac636795f52b3bdd2a2b938494cabcf80e1fb9b321a309f4719edf4e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://240fb1d0e11ae7b3459121ba32094ddfa79dc71a1180befebcaf594859a63d86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"r
estartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://91de7b5c3b52d03b05c151b66857b5af5f3b9d228f3f1af32a7f504225e1e739\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4cb1aca4e313653c2d83c0d4fb8dfab1e6f8c8220c806cbcdcf154f2d9ab6e29\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ef59c1ec617a61effd91da12779f13866a50fe611d078330f0275e8d6ef1c27e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3a814062d9554255b768b26ad452946c64836707ac0e68c9e3f107dc587c0ccb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\
\\":{\\\"containerID\\\":\\\"cri-o://3a814062d9554255b768b26ad452946c64836707ac0e68c9e3f107dc587c0ccb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://74db2c969d2f9049fd8e8130a9fd50b312f3fce049ab0e814390dbff3b0cb596\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://74db2c969d2f9049fd8e8130a9fd50b312f3fce049ab0e814390dbff3b0cb596\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://a367821c9b4b8799fb789ca39d487c519925da11ebb80e94a0123e02cf78964e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a367821c9b4b8799fb789ca39d487c519925da11ebb80e94a0123e02cf78964e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:26Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:56Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:56 crc kubenswrapper[5002]: I0930 12:20:56.105155 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:20:56 crc kubenswrapper[5002]: I0930 12:20:56.105305 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-ls6n9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2ebe21b2-eb85-4b30-b911-78f6619d07f9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e83e79da4b7b8f89a61048c7583c9ff7ccb7ac910744dfbb572c189c5676741\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-frsmm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:52Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-ls6n9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:56Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:56 crc kubenswrapper[5002]: I0930 12:20:56.105364 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:20:56 crc kubenswrapper[5002]: I0930 12:20:56.105726 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:20:56 crc kubenswrapper[5002]: I0930 12:20:56.105870 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:20:56 crc kubenswrapper[5002]: I0930 12:20:56.106005 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:20:56Z","lastTransitionTime":"2025-09-30T12:20:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 12:20:56 crc kubenswrapper[5002]: I0930 12:20:56.120551 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"90d5183f-90e1-421f-bd64-fd7f05605586\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d2e0f074c0f31a5432cea24faa4f95c276f59cdf48bca8ceecd065b4cdff5d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://902cc950dc462d86c9de62cac5d04e99aa3240952f43e8a5f07884e70ff84b56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://047c48f1e175cddc7dc981a8d9027783f4a90c21cb444050771de50fb02e7a9e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f45ac789dc45e907d68fe54ec7bc2ea72dbcc5f46cd55c113c014203d5e709d9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:56Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:56 crc kubenswrapper[5002]: I0930 12:20:56.134078 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:56Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:56 crc kubenswrapper[5002]: I0930 12:20:56.151342 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-ttfn8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2bd9f18d-bfb3-4bd7-9a87-242029cd3200\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0809a587f4b73c4a5e632e8db2cc0d4a957eded6da9a771cf0b53e4862de54e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mo
untPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lvf2n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-ttfn8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:56Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:56 crc kubenswrapper[5002]: I0930 12:20:56.166196 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"341a55c6-78d3-4fa2-8f47-b56fd41fa1c1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7bc40360d8d17f580c7ded1e33762aa443998e4202892e2f2effc08b6659a4a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbwzb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-
o://6868404a7219f8dbbc4e7541e5f80402c169808daea81b5f94546472cb8e70a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbwzb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:50Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-ncbb5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:56Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:56 crc kubenswrapper[5002]: I0930 12:20:56.187337 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4pvsr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7095aa7a-d067-4977-bdc5-3a45a52a6a39\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29ad21e0858ff157cc785f60f949fb08082938850870cc2c5198447f1e1d9253\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://29ad21e0858ff157cc785f60f949fb08082938850870cc2c5198447f1e1d9253\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:50Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4pvsr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:56Z 
is after 2025-08-24T17:21:41Z" Sep 30 12:20:56 crc kubenswrapper[5002]: I0930 12:20:56.198516 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://30f55f9a0bcab13420179c62f63cd5e785afe964ac86286450fcd91d7ca0562e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://306dd7840789c5fe2565d819a744a57e330cb1fcc1ab1ca4b3c5ebcfd0361d3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:56Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:56 crc kubenswrapper[5002]: I0930 12:20:56.210300 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:20:56 crc kubenswrapper[5002]: I0930 
12:20:56.210581 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:20:56 crc kubenswrapper[5002]: I0930 12:20:56.210726 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:20:56 crc kubenswrapper[5002]: I0930 12:20:56.210799 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:20:56 crc kubenswrapper[5002]: I0930 12:20:56.210881 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:20:56Z","lastTransitionTime":"2025-09-30T12:20:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:20:56 crc kubenswrapper[5002]: I0930 12:20:56.211875 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:56Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:56 crc kubenswrapper[5002]: I0930 12:20:56.225709 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-p45pn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d0334cb8-cf38-4e50-80e8-2b81d8d46ae7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca035599120eed38b0abc1893076e4d1ecab6b1ce53605f53ff30fa2782b63de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2nvm4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:49Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-p45pn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-09-30T12:20:56Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:56 crc kubenswrapper[5002]: I0930 12:20:56.256830 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-2lqq2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7b7df259-4156-484c-bedd-543ca42f2970\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6865c45580d6dc6acaf6dede9483213728b45f602a6df3a6d9264bbec520db66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6865c45580d6dc6acaf6dede9483213728b45f602a6df3a6d9264bbec520db66\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\
\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://042d2758098735539d73ee03bcaf5bfaaa765ef2337975341c63511bff33e4ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://042d2758098735539d73ee03bcaf5bfaaa765ef2337975341c63511bff33e4ec\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://037e6837e485e2662c487cea7b28bd28b85c3aa2dc698edd3d16b4269907dd83\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://037e6837e485e2662c487cea7b28bd28b85c3aa2dc698edd3d16b4269907dd83\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2a451f8cf7baf508ad6ceda7f19f3117804e5698e678bedf8e4187f60847aaac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2a451f8cf7baf508ad6ceda7f19f3117804e5698e678bedf8e4187f60847aaac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\
"2025-09-30T12:20:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-2lqq2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:56Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:56 crc kubenswrapper[5002]: I0930 12:20:56.313329 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:20:56 crc kubenswrapper[5002]: I0930 12:20:56.313378 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:20:56 crc kubenswrapper[5002]: I0930 12:20:56.313390 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:20:56 crc kubenswrapper[5002]: I0930 12:20:56.313410 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:20:56 crc kubenswrapper[5002]: I0930 12:20:56.313423 5002 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:20:56Z","lastTransitionTime":"2025-09-30T12:20:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:20:56 crc kubenswrapper[5002]: I0930 12:20:56.416436 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:20:56 crc kubenswrapper[5002]: I0930 12:20:56.416529 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:20:56 crc kubenswrapper[5002]: I0930 12:20:56.416552 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:20:56 crc kubenswrapper[5002]: I0930 12:20:56.416578 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:20:56 crc kubenswrapper[5002]: I0930 12:20:56.416599 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:20:56Z","lastTransitionTime":"2025-09-30T12:20:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:20:56 crc kubenswrapper[5002]: I0930 12:20:56.518794 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:20:56 crc kubenswrapper[5002]: I0930 12:20:56.518842 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:20:56 crc kubenswrapper[5002]: I0930 12:20:56.518855 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:20:56 crc kubenswrapper[5002]: I0930 12:20:56.518872 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:20:56 crc kubenswrapper[5002]: I0930 12:20:56.518887 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:20:56Z","lastTransitionTime":"2025-09-30T12:20:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 12:20:56 crc kubenswrapper[5002]: I0930 12:20:56.622111 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:20:56 crc kubenswrapper[5002]: I0930 12:20:56.622209 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:20:56 crc kubenswrapper[5002]: I0930 12:20:56.622244 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:20:56 crc kubenswrapper[5002]: I0930 12:20:56.622335 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:20:56 crc kubenswrapper[5002]: I0930 12:20:56.622362 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:20:56Z","lastTransitionTime":"2025-09-30T12:20:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:20:56 crc kubenswrapper[5002]: I0930 12:20:56.675788 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 12:20:56 crc kubenswrapper[5002]: I0930 12:20:56.675855 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 12:20:56 crc kubenswrapper[5002]: I0930 12:20:56.675970 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 12:20:56 crc kubenswrapper[5002]: E0930 12:20:56.676093 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 12:20:56 crc kubenswrapper[5002]: E0930 12:20:56.676280 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 12:20:56 crc kubenswrapper[5002]: E0930 12:20:56.676402 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 12:20:56 crc kubenswrapper[5002]: I0930 12:20:56.691075 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-2lqq2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7b7df259-4156-484c-bedd-543ca42f2970\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6865c45580d6dc6acaf6dede9483213728b45f602a6df3a6d9264bbec520db66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6865c45580d6dc6acaf6dede9483213728b45f602a6df3a6d9264bbec520db66\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{
\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://042d2758098735539d73ee03bcaf5bfaaa765ef2337975341c63511bff33e4ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://042d2758098735539d73ee03bcaf5bfaaa765ef2337975341c63511bff33e4ec\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://037e6837e485e2662c487cea7b28bd28b85c3aa2dc698edd3d16b4269907dd83\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://037e6837e485e2662c487cea7b28bd28b85c3aa2dc698edd3d16b4269907dd83\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2a451f8cf7baf508ad6ceda7f19f3117804e5698e678bedf8e4187f60847aaac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2a451f8cf7baf508ad6ceda7f19f3117804e5698e
678bedf8e4187f60847aaac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-2lqq2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:56Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:56 crc kubenswrapper[5002]: I0930 12:20:56.712731 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with 
unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:56Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:56 crc kubenswrapper[5002]: I0930 12:20:56.725077 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:20:56 crc kubenswrapper[5002]: I0930 12:20:56.725137 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:20:56 crc kubenswrapper[5002]: I0930 12:20:56.725157 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:20:56 crc kubenswrapper[5002]: I0930 12:20:56.725180 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:20:56 crc kubenswrapper[5002]: I0930 12:20:56.725197 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:20:56Z","lastTransitionTime":"2025-09-30T12:20:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 12:20:56 crc kubenswrapper[5002]: I0930 12:20:56.726556 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-p45pn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d0334cb8-cf38-4e50-80e8-2b81d8d46ae7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca035599120eed38b0abc1893076e4d1ecab6b1ce53605f53ff30fa2782b63de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2nvm4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:49Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-p45pn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:56Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:56 crc kubenswrapper[5002]: I0930 12:20:56.745049 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:56Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:56 crc kubenswrapper[5002]: I0930 12:20:56.773715 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1d4eefd-8292-45a1-af4b-4a4906cccd2f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21a70de7ac636795f52b3bdd2a2b938494cabcf80e1fb9b321a309f4719edf4e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://240fb1d0e11ae7b3459121ba32094ddfa79dc71a1180befebcaf594859a63d86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://91de7b5c3b52d03b05c151b66857b5af5f3b9d228f3f1af32a7f504225e1e739\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4cb1aca4e313653c2d83c0d4fb8dfab1e6f8c82
20c806cbcdcf154f2d9ab6e29\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ef59c1ec617a61effd91da12779f13866a50fe611d078330f0275e8d6ef1c27e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3a814062d9554255b768b26ad452946c64836707ac0e68c9e3f107dc587c0ccb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3a814062d9554255b768b26ad452946c64836707ac0e68c9e3f107dc587c0ccb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://74db2c969d2f9049fd8e8130a9fd50b312f3fce049ab0e814390dbff3b0cb596\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://74db2c969d2f9049fd8e8130a9fd50b312f3fce049ab0e814390dbff3b0cb596\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://a367821c9b4b8799fb789ca39d487c519925da11ebb80e94a0123e02cf78964e\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a367821c9b4b8799fb789ca39d487c519925da11ebb80e94a0123e02cf78964e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:26Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:56Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:56 crc kubenswrapper[5002]: I0930 12:20:56.791929 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1fe2fccc-790d-41b7-a322-0f99dbd9b3e2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a46e01897629261ad42fc3fa2cc7fdf56a2a020864a1088d9e58edf61aac296e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://017ebbf00a59d33f36e3515dd280bc0c4363da8b3e621339185ebdb3c8728148\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ee0dc371a43f3780e1178e29ac491c7a61786fccb37527dd2f21d800adbbdae\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://49c139b2c0ae558d108d47a8452c2a8348fc164761d468e1b9b98ff2c7407f20\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://49c139b2c0ae558d108d47a8452c2a8348fc164761d468e1b9b98ff2c7407f20\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"message\\\":\\\"le observer\\\\nW0930 12:20:46.369838 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0930 12:20:46.370037 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 12:20:46.371113 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1995022772/tls.crt::/tmp/serving-cert-1995022772/tls.key\\\\\\\"\\\\nI0930 12:20:46.549991 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0930 12:20:46.558658 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0930 12:20:46.558692 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0930 12:20:46.558723 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0930 12:20:46.558735 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0930 12:20:46.572632 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0930 12:20:46.572689 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 12:20:46.572707 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 12:20:46.572718 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0930 12:20:46.572725 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0930 12:20:46.572737 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0930 12:20:46.572744 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0930 12:20:46.573045 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0930 12:20:46.574462 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:40Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b99e73bb34b117cdb12e2475fc8e7d0ffdcb7fc44a000282e7776473eca209ea\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://24b1bc36a80f3925846e76f49c3347f1e2f14555096b3a514b06c6eb2e8fe02c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://24b1bc36a80f3925846e76f49c3347f1e2f14555096b3a514b06c6eb2e8fe02c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:56Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:56 crc kubenswrapper[5002]: I0930 12:20:56.809357 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://28d9568103a5257388af7a54e676246ab6ee732abd4516bd06e2cd7471f0ec08\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:56Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:56 crc kubenswrapper[5002]: I0930 12:20:56.822430 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97f2c4d225c4e4f2f04337915618c52ced7280fe45dc735fd9b1aca03f95f7f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:56Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:56 crc kubenswrapper[5002]: I0930 12:20:56.827060 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:20:56 crc kubenswrapper[5002]: I0930 12:20:56.827092 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:20:56 crc kubenswrapper[5002]: I0930 12:20:56.827102 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:20:56 crc kubenswrapper[5002]: I0930 12:20:56.827118 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:20:56 crc kubenswrapper[5002]: I0930 12:20:56.827130 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:20:56Z","lastTransitionTime":"2025-09-30T12:20:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 12:20:56 crc kubenswrapper[5002]: I0930 12:20:56.837131 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"90d5183f-90e1-421f-bd64-fd7f05605586\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d2e0f074c0f31a5432cea24faa4f95c276f59cdf48bca8ceecd065b4cdff5d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://902cc950dc462d86c9de62cac5d04e99aa3240952f43e8a5f07884e70ff84b56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://047c48f1e175cddc7dc981a8d9027783f4a90c21cb444050771de50fb02e7a9e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f45ac789dc45e907d68fe54ec7bc2ea72dbcc5f46cd55c113c014203d5e709d9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:56Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:56 crc kubenswrapper[5002]: I0930 12:20:56.850659 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-ls6n9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2ebe21b2-eb85-4b30-b911-78f6619d07f9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e83e79da4b7b8f89a61048c7583c9ff7ccb7ac910744dfbb572c189c5676741\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-frsmm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:52Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-ls6n9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:56Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:56 crc kubenswrapper[5002]: I0930 12:20:56.877802 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4pvsr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7095aa7a-d067-4977-bdc5-3a45a52a6a39\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging 
kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-li
b\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\
\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29ad21e0858ff157cc785f60f949fb08082938850870cc2c5198447f1e1d9253\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://29ad21e0858ff157cc785f60f949fb08082938850870cc2c5198447f1e1d9253\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":
\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:50Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4pvsr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:56Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:56 crc kubenswrapper[5002]: I0930 12:20:56.904179 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://30f55f9a0bcab13420179c62f63cd5e785afe964ac86286450fcd91d7ca0562e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://306dd7840789c5fe2565d819a744a57e330cb1fcc1ab1ca4b3c5ebcfd0361d3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:56Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:56 crc kubenswrapper[5002]: I0930 12:20:56.920912 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:56Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:56 crc kubenswrapper[5002]: I0930 12:20:56.928982 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:20:56 crc kubenswrapper[5002]: I0930 12:20:56.929015 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:20:56 crc kubenswrapper[5002]: I0930 12:20:56.929025 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:20:56 crc kubenswrapper[5002]: I0930 12:20:56.929039 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:20:56 crc kubenswrapper[5002]: I0930 12:20:56.929048 5002 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:20:56Z","lastTransitionTime":"2025-09-30T12:20:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:20:56 crc kubenswrapper[5002]: I0930 12:20:56.936391 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-ttfn8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2bd9f18d-bfb3-4bd7-9a87-242029cd3200\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0809a587f4b73c4a5e632e8db2cc0d4a957eded6da9a771cf0b53e4862de54e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube
rnetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lvf2n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-ttfn8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:56Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:56 crc kubenswrapper[5002]: I0930 12:20:56.951353 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"341a55c6-78d3-4fa2-8f47-b56fd41fa1c1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7bc40360d8d17f580c7ded1e33762aa443998e4202892e2f2effc08b6659a4a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbwzb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6868404a7219f8dbbc4e7541e5f80402c169808daea81b5f94546472cb8e70a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:
50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbwzb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:50Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-ncbb5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:56Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:57 crc kubenswrapper[5002]: I0930 12:20:57.019258 5002 generic.go:334] "Generic (PLEG): container finished" podID="7b7df259-4156-484c-bedd-543ca42f2970" containerID="edf6916be3680c65376e28fb893a869599eb07d870ac5337a38fed83b4dbf28d" exitCode=0 Sep 30 12:20:57 crc kubenswrapper[5002]: I0930 12:20:57.019549 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-2lqq2" event={"ID":"7b7df259-4156-484c-bedd-543ca42f2970","Type":"ContainerDied","Data":"edf6916be3680c65376e28fb893a869599eb07d870ac5337a38fed83b4dbf28d"} Sep 30 12:20:57 crc kubenswrapper[5002]: I0930 12:20:57.030888 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:20:57 crc kubenswrapper[5002]: I0930 12:20:57.030925 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:20:57 crc kubenswrapper[5002]: I0930 12:20:57.030933 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:20:57 crc kubenswrapper[5002]: I0930 12:20:57.030947 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:20:57 crc kubenswrapper[5002]: I0930 12:20:57.030956 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:20:57Z","lastTransitionTime":"2025-09-30T12:20:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 12:20:57 crc kubenswrapper[5002]: I0930 12:20:57.051112 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1d4eefd-8292-45a1-af4b-4a4906cccd2f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21a70de7ac636795f52b3bdd2a2b938494cabcf80e1fb9b321a309f4719edf4e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://240fb1d0e11ae7b3459121ba32094ddfa79dc71a1180befebcaf594859a63d86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://91de7b5c3b52d03b05c151b66857b5af5f3b9d228f3f1af32a7f504225e1e739\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4cb1aca4e313653c2d83c0d4fb8dfab1e6f8c8220c806cbcdcf154f2d9ab6e29\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ef59c1ec617a61effd91da12779f13866a50fe611d078330f0275e8d6ef1c27e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3a814062d9554255b768b26ad452946c64836707ac0e68c9e3f107dc587c0ccb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3a814062d9554255b768b26ad452946c64836707ac0e68c9e3f107dc587c0ccb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://74db2c969d2f9049fd8e8130a9fd50b312f3fce049ab0e814390dbff3b0cb596\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://74db2c969d2f9049fd8e8130a9fd50b312f3fce049ab0e814390dbff3b0cb596\\\",\\\"exitCode\\\":0,\\\"finished
At\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://a367821c9b4b8799fb789ca39d487c519925da11ebb80e94a0123e02cf78964e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a367821c9b4b8799fb789ca39d487c519925da11ebb80e94a0123e02cf78964e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:26Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:57Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:57 crc kubenswrapper[5002]: I0930 12:20:57.066422 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1fe2fccc-790d-41b7-a322-0f99dbd9b3e2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a46e01897629261ad42fc3fa2cc7fdf56a2a020864a1088d9e58edf61aac296e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://017ebbf00a59d33f36e3515dd280bc0c4363da8b3e621339185ebdb3c8728148\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ee0dc371a43f3780e1178e29ac491c7a61786fccb37527dd2f21d800adbbdae\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://49c139b2c0ae558d108d47a8452c2a8348fc164761d468e1b9b98ff2c7407f20\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://49c139b2c0ae558d108d47a8452c2a8348fc164761d468e1b9b98ff2c7407f20\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"message\\\":\\\"le observer\\\\nW0930 12:20:46.369838 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0930 12:20:46.370037 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 12:20:46.371113 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1995022772/tls.crt::/tmp/serving-cert-1995022772/tls.key\\\\\\\"\\\\nI0930 12:20:46.549991 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0930 12:20:46.558658 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0930 12:20:46.558692 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0930 12:20:46.558723 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0930 12:20:46.558735 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0930 12:20:46.572632 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0930 12:20:46.572689 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 12:20:46.572707 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 12:20:46.572718 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0930 12:20:46.572725 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0930 12:20:46.572737 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0930 12:20:46.572744 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0930 12:20:46.573045 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0930 12:20:46.574462 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:40Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b99e73bb34b117cdb12e2475fc8e7d0ffdcb7fc44a000282e7776473eca209ea\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://24b1bc36a80f3925846e76f49c3347f1e2f14555096b3a514b06c6eb2e8fe02c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://24b1bc36a80f3925846e76f49c3347f1e2f14555096b3a514b06c6eb2e8fe02c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:57Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:57 crc kubenswrapper[5002]: I0930 12:20:57.085045 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://28d9568103a5257388af7a54e676246ab6ee732abd4516bd06e2cd7471f0ec08\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:57Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:57 crc kubenswrapper[5002]: I0930 12:20:57.097949 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97f2c4d225c4e4f2f04337915618c52ced7280fe45dc735fd9b1aca03f95f7f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:57Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:57 crc kubenswrapper[5002]: I0930 12:20:57.113159 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:57Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:57 crc kubenswrapper[5002]: I0930 12:20:57.127661 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"90d5183f-90e1-421f-bd64-fd7f05605586\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d2e0f074c0f31a5432cea24faa4f95c276f59cdf48bca8ceecd065b4cdff5d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://902cc950dc462d86c9de62cac5d04e99aa3240952f43e8a5f07884e70ff84b56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"
2025-09-30T12:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://047c48f1e175cddc7dc981a8d9027783f4a90c21cb444050771de50fb02e7a9e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f45ac789dc45e907d68fe54ec7bc2ea72dbcc5f46cd55c113c014203d5e709d9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:57Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:57 crc kubenswrapper[5002]: I0930 12:20:57.136114 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:20:57 crc kubenswrapper[5002]: I0930 12:20:57.136170 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:20:57 crc kubenswrapper[5002]: I0930 12:20:57.136188 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:20:57 crc kubenswrapper[5002]: I0930 12:20:57.136212 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:20:57 crc kubenswrapper[5002]: I0930 12:20:57.136234 5002 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:20:57Z","lastTransitionTime":"2025-09-30T12:20:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:20:57 crc kubenswrapper[5002]: I0930 12:20:57.138035 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-ls6n9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2ebe21b2-eb85-4b30-b911-78f6619d07f9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e83e79da4b7b8f89a61048c7583c9ff7ccb7ac910744dfbb572c189c5676741\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-frsmm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:52Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-ls6n9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:57Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:57 crc kubenswrapper[5002]: I0930 12:20:57.152442 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://30f55f9a0bcab13420179c62f63cd5e785afe964ac86286450fcd91d7ca0562e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://306dd7840789c5fe2565d819a744a57e330cb1fcc1ab1ca4b3c5ebcfd0361d3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:57Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:57 crc kubenswrapper[5002]: I0930 12:20:57.166177 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:57Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:57 crc kubenswrapper[5002]: I0930 12:20:57.207944 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-ttfn8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2bd9f18d-bfb3-4bd7-9a87-242029cd3200\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0809a587f4b73c4a5e632e8db2cc0d4a957eded6da9a771cf0b53e4862de54e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lvf2n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-ttfn8\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:57Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:57 crc kubenswrapper[5002]: I0930 12:20:57.231800 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"341a55c6-78d3-4fa2-8f47-b56fd41fa1c1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7bc40360d8d17f580c7ded1e33762aa443998e4202892e2f2effc08b6659a4a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbwzb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6868404a7219f8dbbc4e7541e5f80402c169808daea81b5f94546472cb8e70a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbwzb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:50Z\\\"}}\" for pod 
\"openshift-machine-config-operator\"/\"machine-config-daemon-ncbb5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:57Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:57 crc kubenswrapper[5002]: I0930 12:20:57.240944 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:20:57 crc kubenswrapper[5002]: I0930 12:20:57.240980 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:20:57 crc kubenswrapper[5002]: I0930 12:20:57.240991 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:20:57 crc kubenswrapper[5002]: I0930 12:20:57.241005 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:20:57 crc kubenswrapper[5002]: I0930 12:20:57.241017 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:20:57Z","lastTransitionTime":"2025-09-30T12:20:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:20:57 crc kubenswrapper[5002]: I0930 12:20:57.264595 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4pvsr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7095aa7a-d067-4977-bdc5-3a45a52a6a39\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29ad21e0858ff157cc785f60f949fb08082938850870cc2c5198447f1e1d9253\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://29ad21e0858ff157cc785f60f949fb08082938850870cc2c5198447f1e1d9253\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:50Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4pvsr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:57Z 
is after 2025-08-24T17:21:41Z" Sep 30 12:20:57 crc kubenswrapper[5002]: I0930 12:20:57.276930 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:57Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:57 crc kubenswrapper[5002]: I0930 12:20:57.287105 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-p45pn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d0334cb8-cf38-4e50-80e8-2b81d8d46ae7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca035599120eed38b0abc1893076e4d1ecab6b1ce53605f53ff30fa2782b63de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2nvm4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:49Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-p45pn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:57Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:57 crc kubenswrapper[5002]: I0930 12:20:57.305033 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-2lqq2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7b7df259-4156-484c-bedd-543ca42f2970\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"message\\\":\\\"containers with incomplete status: 
[whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6865c45580d6dc6acaf6dede9483213728b45f602a6df3a6d9264bbec520db66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6865c45580d6dc6acaf6dede9483213728b45f602a6df3a6d9264bbec520db66\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://042d2758098735539d73ee03bcaf5bfaaa765ef2337975341c63511bff33e4ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://042d2758098735539d73ee03bcaf5bfaaa765ef2337975341c63511bff33e4ec\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\
\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://037e6837e485e2662c487cea7b28bd28b85c3aa2dc698edd3d16b4269907dd83\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://037e6837e485e2662c487cea7b28bd28b85c3aa2dc698edd3d16b4269907dd83\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2a451f8cf7baf508ad6ceda7f19f3117804e5698e678bedf8e4187f60847aaac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2a451f8cf7baf508ad6ceda7f19f3117804e5698e678bedf8e4187f60847aaac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://edf6916be3680c65376e28fb893a869599eb07d870ac5337a38fed83b4dbf28d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:
98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://edf6916be3680c65376e28fb893a869599eb07d870ac5337a38fed83b4dbf28d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-2lqq2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:57Z is after 2025-08-24T17:21:41Z"
Sep 30 12:20:57 crc kubenswrapper[5002]: I0930 12:20:57.342295 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 12:20:57 crc kubenswrapper[5002]: I0930 12:20:57.342349 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 12:20:57 crc kubenswrapper[5002]: I0930 12:20:57.342359 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 12:20:57 crc kubenswrapper[5002]: I0930 12:20:57.342372 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 12:20:57 crc kubenswrapper[5002]: I0930 12:20:57.342382 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:20:57Z","lastTransitionTime":"2025-09-30T12:20:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 12:20:57 crc kubenswrapper[5002]: I0930 12:20:57.445756 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 12:20:57 crc kubenswrapper[5002]: I0930 12:20:57.445815 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 12:20:57 crc kubenswrapper[5002]: I0930 12:20:57.445833 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 12:20:57 crc kubenswrapper[5002]: I0930 12:20:57.445857 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 12:20:57 crc kubenswrapper[5002]: I0930 12:20:57.445875 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:20:57Z","lastTransitionTime":"2025-09-30T12:20:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 12:20:57 crc kubenswrapper[5002]: I0930 12:20:57.548634 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 12:20:57 crc kubenswrapper[5002]: I0930 12:20:57.548671 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 12:20:57 crc kubenswrapper[5002]: I0930 12:20:57.548681 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 12:20:57 crc kubenswrapper[5002]: I0930 12:20:57.548693 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 12:20:57 crc kubenswrapper[5002]: I0930 12:20:57.548703 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:20:57Z","lastTransitionTime":"2025-09-30T12:20:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 12:20:57 crc kubenswrapper[5002]: I0930 12:20:57.651437 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 12:20:57 crc kubenswrapper[5002]: I0930 12:20:57.651533 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 12:20:57 crc kubenswrapper[5002]: I0930 12:20:57.651556 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 12:20:57 crc kubenswrapper[5002]: I0930 12:20:57.651585 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 12:20:57 crc kubenswrapper[5002]: I0930 12:20:57.651603 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:20:57Z","lastTransitionTime":"2025-09-30T12:20:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 12:20:57 crc kubenswrapper[5002]: I0930 12:20:57.754517 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 12:20:57 crc kubenswrapper[5002]: I0930 12:20:57.754572 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 12:20:57 crc kubenswrapper[5002]: I0930 12:20:57.754627 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 12:20:57 crc kubenswrapper[5002]: I0930 12:20:57.754672 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 12:20:57 crc kubenswrapper[5002]: I0930 12:20:57.754692 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:20:57Z","lastTransitionTime":"2025-09-30T12:20:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 12:20:57 crc kubenswrapper[5002]: I0930 12:20:57.857145 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 12:20:57 crc kubenswrapper[5002]: I0930 12:20:57.857192 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 12:20:57 crc kubenswrapper[5002]: I0930 12:20:57.857203 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 12:20:57 crc kubenswrapper[5002]: I0930 12:20:57.857222 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 12:20:57 crc kubenswrapper[5002]: I0930 12:20:57.857236 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:20:57Z","lastTransitionTime":"2025-09-30T12:20:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 12:20:57 crc kubenswrapper[5002]: I0930 12:20:57.959951 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 12:20:57 crc kubenswrapper[5002]: I0930 12:20:57.959989 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 12:20:57 crc kubenswrapper[5002]: I0930 12:20:57.960002 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 12:20:57 crc kubenswrapper[5002]: I0930 12:20:57.960018 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 12:20:57 crc kubenswrapper[5002]: I0930 12:20:57.960029 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:20:57Z","lastTransitionTime":"2025-09-30T12:20:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 12:20:58 crc kubenswrapper[5002]: I0930 12:20:58.026403 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4pvsr" event={"ID":"7095aa7a-d067-4977-bdc5-3a45a52a6a39","Type":"ContainerStarted","Data":"95fc7cf1f23c91d4989756ce53322d40c23e54ad75a951dd4c49e23c0593a016"}
Sep 30 12:20:58 crc kubenswrapper[5002]: I0930 12:20:58.028281 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-4pvsr"
Sep 30 12:20:58 crc kubenswrapper[5002]: I0930 12:20:58.028357 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-4pvsr"
Sep 30 12:20:58 crc kubenswrapper[5002]: I0930 12:20:58.028457 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-4pvsr"
Sep 30 12:20:58 crc kubenswrapper[5002]: I0930 12:20:58.032252 5002 generic.go:334] "Generic (PLEG): container finished" podID="7b7df259-4156-484c-bedd-543ca42f2970" containerID="7019df241ab90e98201c474287f02919d99bca60c432a8f61ac68d29ce53bed8" exitCode=0
Sep 30 12:20:58 crc kubenswrapper[5002]: I0930 12:20:58.032288 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-2lqq2" event={"ID":"7b7df259-4156-484c-bedd-543ca42f2970","Type":"ContainerDied","Data":"7019df241ab90e98201c474287f02919d99bca60c432a8f61ac68d29ce53bed8"}
Sep 30 12:20:58 crc kubenswrapper[5002]: I0930 12:20:58.049637 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:58Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:58 crc kubenswrapper[5002]: I0930 12:20:58.061691 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-p45pn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d0334cb8-cf38-4e50-80e8-2b81d8d46ae7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca035599120eed38b0abc1893076e4d1ecab6b1ce53605f53ff30fa2782b63de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2nvm4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:49Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-p45pn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-09-30T12:20:58Z is after 2025-08-24T17:21:41Z"
Sep 30 12:20:58 crc kubenswrapper[5002]: I0930 12:20:58.062885 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 12:20:58 crc kubenswrapper[5002]: I0930 12:20:58.062943 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 12:20:58 crc kubenswrapper[5002]: I0930 12:20:58.062960 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 12:20:58 crc kubenswrapper[5002]: I0930 12:20:58.062986 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 12:20:58 crc kubenswrapper[5002]: I0930 12:20:58.063007 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:20:58Z","lastTransitionTime":"2025-09-30T12:20:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 12:20:58 crc kubenswrapper[5002]: I0930 12:20:58.066679 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-4pvsr"
Sep 30 12:20:58 crc kubenswrapper[5002]: I0930 12:20:58.070456 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-4pvsr"
Sep 30 12:20:58 crc kubenswrapper[5002]: I0930 12:20:58.077276 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-2lqq2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7b7df259-4156-484c-bedd-543ca42f2970\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6865c45580d6dc6acaf6dede9483213728b45f602a6df3a6d9264bbec520db66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6865c45580d6dc6acaf6dede9483213728b45f602a6df3a6d9264bbec520db66\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://042d2758098735539d73ee03bcaf5bfaaa765ef2337975341c63511bff33e4ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://042d2758098735539d73ee03bcaf5bfaaa765ef2337975341c63511bff33e4ec\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://037e6837e485e2662c487cea7b28bd28b85c3aa2dc698edd3d16b4269907dd83\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://037e6837e485e2662c487cea7b28bd28b85c3aa2dc698edd3d16b4269907dd83\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2a451f8cf7baf508ad6ceda7f19f3117804e5698e678bedf8e4187f60847aaac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2a451f8cf7baf508ad6ceda7f19f3117804e5698e678bedf8e4187f60847aaac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://edf6916be3680c65376e28fb893a869599eb07d870ac5337a38fed83b4dbf28d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://edf6916be3680c65376e28fb893a869599eb07d870ac5337a38fed83b4dbf28d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",
\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-2lqq2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:58Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:58 crc kubenswrapper[5002]: I0930 12:20:58.097363 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://28d9568103a5257388af7a54e676246ab6ee732abd4516bd06e2cd7471f0ec08\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:58Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:58 crc kubenswrapper[5002]: I0930 12:20:58.108874 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97f2c4d225c4e4f2f04337915618c52ced7280fe45dc735fd9b1aca03f95f7f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:58Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:58 crc kubenswrapper[5002]: I0930 12:20:58.123988 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:58Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:58 crc kubenswrapper[5002]: I0930 12:20:58.146931 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1d4eefd-8292-45a1-af4b-4a4906cccd2f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21a70de7ac636795f52b3bdd2a2b938494cabcf80e1fb9b321a309f4719edf4e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://240fb1d0e11ae7b3459121ba32094ddfa79dc71a1180befebcaf594859a63d86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"r
estartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://91de7b5c3b52d03b05c151b66857b5af5f3b9d228f3f1af32a7f504225e1e739\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4cb1aca4e313653c2d83c0d4fb8dfab1e6f8c8220c806cbcdcf154f2d9ab6e29\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ef59c1ec617a61effd91da12779f13866a50fe611d078330f0275e8d6ef1c27e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3a814062d9554255b768b26ad452946c64836707ac0e68c9e3f107dc587c0ccb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\
\\":{\\\"containerID\\\":\\\"cri-o://3a814062d9554255b768b26ad452946c64836707ac0e68c9e3f107dc587c0ccb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://74db2c969d2f9049fd8e8130a9fd50b312f3fce049ab0e814390dbff3b0cb596\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://74db2c969d2f9049fd8e8130a9fd50b312f3fce049ab0e814390dbff3b0cb596\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://a367821c9b4b8799fb789ca39d487c519925da11ebb80e94a0123e02cf78964e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a367821c9b4b8799fb789ca39d487c519925da11ebb80e94a0123e02cf78964e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:26Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:58Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:58 crc kubenswrapper[5002]: I0930 12:20:58.164202 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1fe2fccc-790d-41b7-a322-0f99dbd9b3e2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a46e01897629261ad42fc3fa2cc7fdf56a2a020864a1088d9e58edf61aac296e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://017ebbf00a59d33f36e3515dd280bc0c4363da8b3e621339185ebdb3c8728148\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ee0dc371a43f3780e1178e29ac491c7a61786fccb37527dd2f21d800adbbdae\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://49c139b2c0ae558d108d47a8452c2a8348fc164761d468e1b9b98ff2c7407f20\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\
\":\\\"cri-o://49c139b2c0ae558d108d47a8452c2a8348fc164761d468e1b9b98ff2c7407f20\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"message\\\":\\\"le observer\\\\nW0930 12:20:46.369838 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0930 12:20:46.370037 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 12:20:46.371113 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1995022772/tls.crt::/tmp/serving-cert-1995022772/tls.key\\\\\\\"\\\\nI0930 12:20:46.549991 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0930 12:20:46.558658 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0930 12:20:46.558692 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0930 12:20:46.558723 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0930 12:20:46.558735 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0930 12:20:46.572632 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0930 12:20:46.572689 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 12:20:46.572707 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 12:20:46.572718 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0930 12:20:46.572725 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0930 12:20:46.572737 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0930 12:20:46.572744 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0930 12:20:46.573045 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0930 12:20:46.574462 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:40Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b99e73bb34b117cdb12e2475fc8e7d0ffdcb7fc44a000282e7776473eca209ea\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://24b1bc36a80f3925846e76f49c3347f1e2f14555096b3a514b06c6eb2e8fe02c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://24b1bc36a80f3925846e76f49c3347f1e2f14555096b3a514b06c6eb2e8fe02c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:58Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:58 crc kubenswrapper[5002]: I0930 12:20:58.165819 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:20:58 crc kubenswrapper[5002]: I0930 12:20:58.165846 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:20:58 crc kubenswrapper[5002]: I0930 12:20:58.165857 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:20:58 crc kubenswrapper[5002]: I0930 12:20:58.165873 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:20:58 crc kubenswrapper[5002]: I0930 12:20:58.165886 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:20:58Z","lastTransitionTime":"2025-09-30T12:20:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:20:58 crc kubenswrapper[5002]: I0930 12:20:58.177885 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"90d5183f-90e1-421f-bd64-fd7f05605586\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d2e0f074c0f31a5432cea24faa4f95c276f59cdf48bca8ceecd065b4cdff5d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://902cc950dc462d86c9de62cac5d04e99aa3240952f43e8a5f07884e70ff84b56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://047c48f1e175cddc7dc981a8d9027783f4a90c21cb444050771de50fb02e7a9e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:28
Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f45ac789dc45e907d68fe54ec7bc2ea72dbcc5f46cd55c113c014203d5e709d9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:58Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:58 crc kubenswrapper[5002]: I0930 12:20:58.190584 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-ls6n9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2ebe21b2-eb85-4b30-b911-78f6619d07f9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e83e79da4b7b8f89a61048c7583c9ff7ccb7ac910744dfbb572c189c5676741\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-frsmm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:52Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-ls6n9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:58Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:58 crc kubenswrapper[5002]: I0930 12:20:58.207058 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-ttfn8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2bd9f18d-bfb3-4bd7-9a87-242029cd3200\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0809a587f4b73c4a5e632e8db2cc0d4a957eded6da9a771cf0b53e4862de54e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lvf2n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-ttfn8\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:58Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:58 crc kubenswrapper[5002]: I0930 12:20:58.217849 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"341a55c6-78d3-4fa2-8f47-b56fd41fa1c1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7bc40360d8d17f580c7ded1e33762aa443998e4202892e2f2effc08b6659a4a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbwzb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6868404a7219f8dbbc4e7541e5f80402c169808daea81b5f94546472cb8e70a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbwzb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:50Z\\\"}}\" for pod 
\"openshift-machine-config-operator\"/\"machine-config-daemon-ncbb5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:58Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:58 crc kubenswrapper[5002]: I0930 12:20:58.233452 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4pvsr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7095aa7a-d067-4977-bdc5-3a45a52a6a39\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b95267743f525eb5c8712304a1dc7afe4403b2065c0a3b594d95dcba6348170\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://89dbc048c1483376e11a6754384e9ea154767a88f7dfb3463809a2554bb142b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\
\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d41b04f928f3b4423bf4aa1babfd6292b749752d31ac57771b36bed503312d3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ceb946c94ca247c05c57001754a73122b1bb98a98d886df9d64b2e4a003a4cf6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfbe7b62a4f0a00896ac4608d70c4ef6dc7675e0312f85ca181fcf1087fe73e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\
\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a65c3c1af3f1ee07c2f5350011d1f5eb61dfe0b21ff13d7326373d65ffe603c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://95fc7cf1f23c91d4989756ce53322d40c23e54ad75a951dd4c49e23c0593a016\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/r
un/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ce6227a7ac3ced1fbd4814039859e25ff52f314aa8479275f1bfc49b04cf411\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29ad21e0858ff157cc785f60f949fb08082938850870cc2c5198447f1e1d9253\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://29ad21e0858ff157cc785f60f949fb08082938850870cc2c5198447f1e1d9253\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:50Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4pvsr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:58Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:58 crc 
kubenswrapper[5002]: I0930 12:20:58.246834 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://30f55f9a0bcab13420179c62f63cd5e785afe964ac86286450fcd91d7ca0562e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://306dd7840789c5fe2565d819a744a57e330cb1fcc1ab1ca4b3c5ebcfd0361d3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:58Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:58 crc kubenswrapper[5002]: I0930 12:20:58.258129 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:58Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:58 crc kubenswrapper[5002]: I0930 12:20:58.268228 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:20:58 crc kubenswrapper[5002]: I0930 12:20:58.268261 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:20:58 crc kubenswrapper[5002]: I0930 12:20:58.268272 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:20:58 crc kubenswrapper[5002]: I0930 12:20:58.268289 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:20:58 crc kubenswrapper[5002]: I0930 12:20:58.268300 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:20:58Z","lastTransitionTime":"2025-09-30T12:20:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 12:20:58 crc kubenswrapper[5002]: I0930 12:20:58.273796 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://28d9568103a5257388af7a54e676246ab6ee732abd4516bd06e2cd7471f0ec08\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:58Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:58 crc kubenswrapper[5002]: I0930 12:20:58.286999 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97f2c4d225c4e4f2f04337915618c52ced7280fe45dc735fd9b1aca03f95f7f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:58Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:58 crc kubenswrapper[5002]: I0930 12:20:58.299118 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:58Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:58 crc kubenswrapper[5002]: I0930 12:20:58.319235 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1d4eefd-8292-45a1-af4b-4a4906cccd2f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21a70de7ac636795f52b3bdd2a2b938494cabcf80e1fb9b321a309f4719edf4e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://240fb1d0e11ae7b3459121ba32094ddfa79dc71a1180befebcaf594859a63d86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"r
estartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://91de7b5c3b52d03b05c151b66857b5af5f3b9d228f3f1af32a7f504225e1e739\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4cb1aca4e313653c2d83c0d4fb8dfab1e6f8c8220c806cbcdcf154f2d9ab6e29\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ef59c1ec617a61effd91da12779f13866a50fe611d078330f0275e8d6ef1c27e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3a814062d9554255b768b26ad452946c64836707ac0e68c9e3f107dc587c0ccb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\
\\":{\\\"containerID\\\":\\\"cri-o://3a814062d9554255b768b26ad452946c64836707ac0e68c9e3f107dc587c0ccb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://74db2c969d2f9049fd8e8130a9fd50b312f3fce049ab0e814390dbff3b0cb596\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://74db2c969d2f9049fd8e8130a9fd50b312f3fce049ab0e814390dbff3b0cb596\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://a367821c9b4b8799fb789ca39d487c519925da11ebb80e94a0123e02cf78964e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a367821c9b4b8799fb789ca39d487c519925da11ebb80e94a0123e02cf78964e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:26Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:58Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:58 crc kubenswrapper[5002]: I0930 12:20:58.332651 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1fe2fccc-790d-41b7-a322-0f99dbd9b3e2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a46e01897629261ad42fc3fa2cc7fdf56a2a020864a1088d9e58edf61aac296e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://017ebbf00a59d33f36e3515dd280bc0c4363da8b3e621339185ebdb3c8728148\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ee0dc371a43f3780e1178e29ac491c7a61786fccb37527dd2f21d800adbbdae\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://49c139b2c0ae558d108d47a8452c2a8348fc164761d468e1b9b98ff2c7407f20\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\
\":\\\"cri-o://49c139b2c0ae558d108d47a8452c2a8348fc164761d468e1b9b98ff2c7407f20\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"message\\\":\\\"le observer\\\\nW0930 12:20:46.369838 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0930 12:20:46.370037 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 12:20:46.371113 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1995022772/tls.crt::/tmp/serving-cert-1995022772/tls.key\\\\\\\"\\\\nI0930 12:20:46.549991 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0930 12:20:46.558658 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0930 12:20:46.558692 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0930 12:20:46.558723 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0930 12:20:46.558735 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0930 12:20:46.572632 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0930 12:20:46.572689 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 12:20:46.572707 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 12:20:46.572718 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0930 12:20:46.572725 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0930 12:20:46.572737 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0930 12:20:46.572744 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0930 12:20:46.573045 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0930 12:20:46.574462 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:40Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b99e73bb34b117cdb12e2475fc8e7d0ffdcb7fc44a000282e7776473eca209ea\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://24b1bc36a80f3925846e76f49c3347f1e2f14555096b3a514b06c6eb2e8fe02c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://24b1bc36a80f3925846e76f49c3347f1e2f14555096b3a514b06c6eb2e8fe02c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:58Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:58 crc kubenswrapper[5002]: I0930 12:20:58.345130 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"90d5183f-90e1-421f-bd64-fd7f05605586\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d2e0f074c0f31a5432cea24faa4f95c276f59cdf48bca8ceecd065b4cdff5d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://902cc950dc462d86c9de62cac5d04e99aa3240952f43e8a5f07884e70ff84b56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://047c48f1e175cddc7dc981a8d9027783f4a90c21cb444050771de50fb02e7a9e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f45ac789dc45e907d68fe54ec7bc2ea72dbcc5f46cd55c113c014203d5e709d9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:58Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:58 crc kubenswrapper[5002]: I0930 12:20:58.361704 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-ls6n9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2ebe21b2-eb85-4b30-b911-78f6619d07f9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e83e79da4b7b8f89a61048c7583c9ff7ccb7ac910744dfbb572c189c5676741\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-frsmm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase
\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:52Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-ls6n9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:58Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:58 crc kubenswrapper[5002]: I0930 12:20:58.371861 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:20:58 crc kubenswrapper[5002]: I0930 12:20:58.371895 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:20:58 crc kubenswrapper[5002]: I0930 12:20:58.371904 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:20:58 crc kubenswrapper[5002]: I0930 12:20:58.371920 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:20:58 crc kubenswrapper[5002]: I0930 12:20:58.371929 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:20:58Z","lastTransitionTime":"2025-09-30T12:20:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:20:58 crc kubenswrapper[5002]: I0930 12:20:58.377626 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-ttfn8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2bd9f18d-bfb3-4bd7-9a87-242029cd3200\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0809a587f4b73c4a5e632e8db2cc0d4a957eded6da9a771cf0b53e4862de54e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lvf2n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-ttfn8\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:58Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:58 crc kubenswrapper[5002]: I0930 12:20:58.392384 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"341a55c6-78d3-4fa2-8f47-b56fd41fa1c1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7bc40360d8d17f580c7ded1e33762aa443998e4202892e2f2effc08b6659a4a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbwzb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6868404a7219f8dbbc4e7541e5f80402c169808daea81b5f94546472cb8e70a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbwzb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:50Z\\\"}}\" for pod 
\"openshift-machine-config-operator\"/\"machine-config-daemon-ncbb5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:58Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:58 crc kubenswrapper[5002]: I0930 12:20:58.415765 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4pvsr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7095aa7a-d067-4977-bdc5-3a45a52a6a39\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b95267743f525eb5c8712304a1dc7afe4403b2065c0a3b594d95dcba6348170\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://89dbc048c1483376e11a6754384e9ea154767a88f7dfb3463809a2554bb142b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\
\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d41b04f928f3b4423bf4aa1babfd6292b749752d31ac57771b36bed503312d3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ceb946c94ca247c05c57001754a73122b1bb98a98d886df9d64b2e4a003a4cf6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfbe7b62a4f0a00896ac4608d70c4ef6dc7675e0312f85ca181fcf1087fe73e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-acce
ss-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a65c3c1af3f1ee07c2f5350011d1f5eb61dfe0b21ff13d7326373d65ffe603c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://95fc7cf1f23c91d4989756ce53322d40c23e54ad75a951dd4c49e23c0593a016\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\
\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ce6227a7ac3ced1fbd4814039859e25ff52f314aa8479275f1bfc49b04cf411\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29ad21e0858ff157cc785f60f949fb08082938850870cc2c5198447f1e1d9253\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://29ad21e0858ff157cc785f60f949fb08082938850870cc2c5198447f1e1d9253\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:50Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4pvsr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:58Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:58 crc kubenswrapper[5002]: I0930 
12:20:58.430208 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://30f55f9a0bcab13420179c62f63cd5e785afe964ac86286450fcd91d7ca0562e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://306dd7840789c5fe2565d819a744a57e330cb1fcc1ab1ca4b3c5ebcfd0361d3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:58Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:58 crc kubenswrapper[5002]: I0930 12:20:58.443459 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:58Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:58 crc kubenswrapper[5002]: I0930 12:20:58.454857 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:58Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:58 crc kubenswrapper[5002]: I0930 12:20:58.465149 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-p45pn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d0334cb8-cf38-4e50-80e8-2b81d8d46ae7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca035599120eed38b0abc1893076e4d1ecab6b1ce53605f53ff30fa2782b63de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2nvm4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\
\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:49Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-p45pn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:58Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:58 crc kubenswrapper[5002]: I0930 12:20:58.473907 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:20:58 crc kubenswrapper[5002]: I0930 12:20:58.473946 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:20:58 crc kubenswrapper[5002]: I0930 12:20:58.473956 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:20:58 crc kubenswrapper[5002]: I0930 12:20:58.473970 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:20:58 crc kubenswrapper[5002]: I0930 12:20:58.473979 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:20:58Z","lastTransitionTime":"2025-09-30T12:20:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:20:58 crc kubenswrapper[5002]: I0930 12:20:58.479574 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-2lqq2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7b7df259-4156-484c-bedd-543ca42f2970\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6865c45580d6dc6acaf6dede9483213728b45f602a6df3a6d9264bbec520db66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6865c45580d6dc6acaf6dede9483213728b45f602a6df3a6d9264bbec520db66\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://042d2758098735539d73ee03bcaf5bfaaa765ef2337975341c63511bff33e4ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://042d2758098735539d73ee03bcaf5bfaaa765ef2337975341c63511bff33e4ec\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://037e6837e485e2662c487cea7b28bd28b85c3aa2dc698edd3d16b4269907dd83\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://037e6837e485e2662c487cea7b28bd28b85c3aa2dc698edd3d16b4269907dd83\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2a451f8cf7baf508ad6ceda7f19f3117804e5698e678bedf8e4187f60847aaac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2a451f8cf7baf508ad6ceda7f19f3117804e5698e678bedf8e4187f60847aaac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://edf6916be3680c65376e28fb893a869599eb07d870ac5337a38fed83b4dbf28d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://edf6916be3680c65376e28fb893a869599eb07d870ac5337a38fed83b4dbf28d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",
\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7019df241ab90e98201c474287f02919d99bca60c432a8f61ac68d29ce53bed8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7019df241ab90e98201c474287f02919d99bca60c432a8f61ac68d29ce53bed8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-2lqq2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:58Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:58 crc kubenswrapper[5002]: I0930 12:20:58.576106 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:20:58 crc kubenswrapper[5002]: I0930 12:20:58.576147 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:20:58 crc kubenswrapper[5002]: I0930 12:20:58.576158 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:20:58 crc kubenswrapper[5002]: I0930 12:20:58.576173 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:20:58 crc kubenswrapper[5002]: I0930 12:20:58.576184 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:20:58Z","lastTransitionTime":"2025-09-30T12:20:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:20:58 crc kubenswrapper[5002]: I0930 12:20:58.675663 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 12:20:58 crc kubenswrapper[5002]: I0930 12:20:58.675737 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 12:20:58 crc kubenswrapper[5002]: E0930 12:20:58.675829 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 12:20:58 crc kubenswrapper[5002]: E0930 12:20:58.676037 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 12:20:58 crc kubenswrapper[5002]: I0930 12:20:58.676355 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 12:20:58 crc kubenswrapper[5002]: E0930 12:20:58.676503 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 12:20:58 crc kubenswrapper[5002]: I0930 12:20:58.678070 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:20:58 crc kubenswrapper[5002]: I0930 12:20:58.678103 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:20:58 crc kubenswrapper[5002]: I0930 12:20:58.678113 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:20:58 crc kubenswrapper[5002]: I0930 12:20:58.678129 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:20:58 crc kubenswrapper[5002]: I0930 12:20:58.678142 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:20:58Z","lastTransitionTime":"2025-09-30T12:20:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 12:20:58 crc kubenswrapper[5002]: I0930 12:20:58.780565 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:20:58 crc kubenswrapper[5002]: I0930 12:20:58.780634 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:20:58 crc kubenswrapper[5002]: I0930 12:20:58.780651 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:20:58 crc kubenswrapper[5002]: I0930 12:20:58.780677 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:20:58 crc kubenswrapper[5002]: I0930 12:20:58.780694 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:20:58Z","lastTransitionTime":"2025-09-30T12:20:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:20:58 crc kubenswrapper[5002]: I0930 12:20:58.883184 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:20:58 crc kubenswrapper[5002]: I0930 12:20:58.883230 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:20:58 crc kubenswrapper[5002]: I0930 12:20:58.883240 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:20:58 crc kubenswrapper[5002]: I0930 12:20:58.883259 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:20:58 crc kubenswrapper[5002]: I0930 12:20:58.883269 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:20:58Z","lastTransitionTime":"2025-09-30T12:20:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:20:58 crc kubenswrapper[5002]: I0930 12:20:58.986662 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:20:58 crc kubenswrapper[5002]: I0930 12:20:58.986700 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:20:58 crc kubenswrapper[5002]: I0930 12:20:58.986710 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:20:58 crc kubenswrapper[5002]: I0930 12:20:58.986724 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:20:58 crc kubenswrapper[5002]: I0930 12:20:58.986734 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:20:58Z","lastTransitionTime":"2025-09-30T12:20:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 12:20:59 crc kubenswrapper[5002]: I0930 12:20:59.040679 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-2lqq2" event={"ID":"7b7df259-4156-484c-bedd-543ca42f2970","Type":"ContainerStarted","Data":"3e10fed923eacb0c5409835ec689ec74b4ef68b770774b3fe03e05db05d63c04"} Sep 30 12:20:59 crc kubenswrapper[5002]: I0930 12:20:59.057559 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"90d5183f-90e1-421f-bd64-fd7f05605586\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d2e0f074c0f31a5432cea24faa4f95c276f59cdf48bca8ceecd065b4cdff5d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://902cc950dc462d86c9de62cac5d04e99aa3240952f43e8a5f07884e70ff84b56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://047c48f1e175cddc7dc981a8d9027783f4a90c21cb444050771de50fb02e7a9e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"nam
e\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f45ac789dc45e907d68fe54ec7bc2ea72dbcc5f46cd55c113c014203d5e709d9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:59Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:59 crc kubenswrapper[5002]: I0930 12:20:59.067761 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-ls6n9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2ebe21b2-eb85-4b30-b911-78f6619d07f9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e83e79da4b7b8f89a61048c7583c9ff7ccb7ac910744dfbb572c189c5676741\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-frsmm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:52Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-ls6n9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:59Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:59 crc kubenswrapper[5002]: I0930 12:20:59.087709 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4pvsr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7095aa7a-d067-4977-bdc5-3a45a52a6a39\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b95267743f525eb5c8712304a1dc7afe4403b2065c0a3b594d95dcba6348170\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://89dbc048c1483376e11a6754384e9ea154767a88f7dfb3463809a2554bb142b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d41b04f928f3b4423bf4aa1babfd6292b749752d31ac57771b36bed503312d3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ceb946c94ca247c05c57001754a73122b1bb98a98d886df9d64b2e4a003a4cf6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfbe7b62a4f0a00896ac4608d70c4ef6dc7675e0312f85ca181fcf1087fe73e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a65c3c1af3f1ee07c2f5350011d1f5eb61dfe0b21ff13d7326373d65ffe603c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://95fc7cf1f23c91d4989756ce53322d40c23e54ad75a951dd4c49e23c0593a016\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ce6227a7ac3ced1fbd4814039859e25ff52f314aa8479275f1bfc49b04cf411\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPat
h\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29ad21e0858ff157cc785f60f949fb08082938850870cc2c5198447f1e1d9253\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://29ad21e0858ff157cc785f60f949fb08082938850870cc2c5198447f1e1d9253\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:50Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4pvsr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:59Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:59 crc kubenswrapper[5002]: I0930 12:20:59.093433 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:20:59 crc kubenswrapper[5002]: I0930 12:20:59.093518 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:20:59 crc kubenswrapper[5002]: I0930 12:20:59.093539 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:20:59 crc kubenswrapper[5002]: I0930 12:20:59.093563 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:20:59 crc kubenswrapper[5002]: I0930 12:20:59.093582 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:20:59Z","lastTransitionTime":"2025-09-30T12:20:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 12:20:59 crc kubenswrapper[5002]: I0930 12:20:59.106770 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://30f55f9a0bcab13420179c62f63cd5e785afe964ac86286450fcd91d7ca0562e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://306dd7840789c5fe2565d819a744a57e330cb1fcc1ab1ca4b3c5ebcfd0361d3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:59Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:59 crc kubenswrapper[5002]: I0930 12:20:59.119706 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:59Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:59 crc kubenswrapper[5002]: I0930 12:20:59.133382 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-ttfn8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2bd9f18d-bfb3-4bd7-9a87-242029cd3200\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0809a587f4b73c4a5e632e8db2cc0d4a957eded6da9a771cf0b53e4862de54e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lvf2n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-ttfn8\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:59Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:59 crc kubenswrapper[5002]: I0930 12:20:59.145794 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"341a55c6-78d3-4fa2-8f47-b56fd41fa1c1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7bc40360d8d17f580c7ded1e33762aa443998e4202892e2f2effc08b6659a4a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbwzb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6868404a7219f8dbbc4e7541e5f80402c169808daea81b5f94546472cb8e70a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbwzb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:50Z\\\"}}\" for pod 
\"openshift-machine-config-operator\"/\"machine-config-daemon-ncbb5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:59Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:59 crc kubenswrapper[5002]: I0930 12:20:59.158571 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-2lqq2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7b7df259-4156-484c-bedd-543ca42f2970\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3e10fed923eacb0c5409835ec689ec74b4ef68b770774b3fe03e05db05d63c04\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6865c45580d6dc6acaf6dede9483213728b45f602a6df3a6d9264bbec520db66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6865c45580d6dc6acaf6dede9483213728b45f602a6df3a6d9264bbec520db66\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\
",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://042d2758098735539d73ee03bcaf5bfaaa765ef2337975341c63511bff33e4ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://042d2758098735539d73ee03bcaf5bfaaa765ef2337975341c63511bff33e4ec\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://037e6837e485e2662c487cea7b28bd28b85c3aa2dc698edd3d16b4269907dd83\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://037e6837e485e2662c487cea7b28bd28b85c3aa2dc698edd3d16b4269907dd83\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2a451f8cf7baf508ad6ceda7f19f3117804e5698e678bedf8e4187f60847aaac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"
containerID\\\":\\\"cri-o://2a451f8cf7baf508ad6ceda7f19f3117804e5698e678bedf8e4187f60847aaac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://edf6916be3680c65376e28fb893a869599eb07d870ac5337a38fed83b4dbf28d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://edf6916be3680c65376e28fb893a869599eb07d870ac5337a38fed83b4dbf28d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7019df241ab90e98201c474287f02919d99bca60c432a8f61ac68d29ce53bed8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7019df241ab90e98201c474287f02919d99bca60c432a8f61ac68d29ce53bed8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-2lqq2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to 
call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:59Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:59 crc kubenswrapper[5002]: I0930 12:20:59.169951 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:59Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:59 crc kubenswrapper[5002]: I0930 12:20:59.180027 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-p45pn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d0334cb8-cf38-4e50-80e8-2b81d8d46ae7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca035599120eed38b0abc1893076e4d1ecab6b1ce53605f53ff30fa2782b63de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2nvm4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:49Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-p45pn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:59Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:59 crc kubenswrapper[5002]: I0930 12:20:59.192830 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:59Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:59 crc kubenswrapper[5002]: I0930 12:20:59.196995 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:20:59 crc kubenswrapper[5002]: I0930 12:20:59.197033 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:20:59 crc kubenswrapper[5002]: I0930 12:20:59.197049 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:20:59 crc kubenswrapper[5002]: I0930 12:20:59.197071 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:20:59 crc kubenswrapper[5002]: I0930 12:20:59.197088 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:20:59Z","lastTransitionTime":"2025-09-30T12:20:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 12:20:59 crc kubenswrapper[5002]: I0930 12:20:59.213799 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1d4eefd-8292-45a1-af4b-4a4906cccd2f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21a70de7ac636795f52b3bdd2a2b938494cabcf80e1fb9b321a309f4719edf4e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://240fb1d0e11ae7b3459121ba32094ddfa79dc71a1180befebcaf594859a63d86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://91de7b5c3b52d03b05c151b66857b5af5f3b9d228f3f1af32a7f504225e1e739\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4cb1aca4e313653c2d83c0d4fb8dfab1e6f8c8220c806cbcdcf154f2d9ab6e29\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ef59c1ec617a61effd91da12779f13866a50fe611d078330f0275e8d6ef1c27e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3a814062d9554255b768b26ad452946c64836707ac0e68c9e3f107dc587c0ccb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3a814062d9554255b768b26ad452946c64836707ac0e68c9e3f107dc587c0ccb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://74db2c969d2f9049fd8e8130a9fd50b312f3fce049ab0e814390dbff3b0cb596\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://74db2c969d2f9049fd8e8130a9fd50b312f3fce049ab0e814390dbff3b0cb596\\\",\\\"exitCode\\\":0,\\\"finished
At\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://a367821c9b4b8799fb789ca39d487c519925da11ebb80e94a0123e02cf78964e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a367821c9b4b8799fb789ca39d487c519925da11ebb80e94a0123e02cf78964e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:26Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:59Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:59 crc kubenswrapper[5002]: I0930 12:20:59.235752 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1fe2fccc-790d-41b7-a322-0f99dbd9b3e2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a46e01897629261ad42fc3fa2cc7fdf56a2a020864a1088d9e58edf61aac296e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://017ebbf00a59d33f36e3515dd280bc0c4363da8b3e621339185ebdb3c8728148\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ee0dc371a43f3780e1178e29ac491c7a61786fccb37527dd2f21d800adbbdae\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://49c139b2c0ae558d108d47a8452c2a8348fc164761d468e1b9b98ff2c7407f20\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://49c139b2c0ae558d108d47a8452c2a8348fc164761d468e1b9b98ff2c7407f20\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"message\\\":\\\"le observer\\\\nW0930 12:20:46.369838 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0930 12:20:46.370037 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 12:20:46.371113 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1995022772/tls.crt::/tmp/serving-cert-1995022772/tls.key\\\\\\\"\\\\nI0930 12:20:46.549991 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0930 12:20:46.558658 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0930 12:20:46.558692 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0930 12:20:46.558723 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0930 12:20:46.558735 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0930 12:20:46.572632 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0930 12:20:46.572689 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 12:20:46.572707 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 12:20:46.572718 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0930 12:20:46.572725 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0930 12:20:46.572737 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0930 12:20:46.572744 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0930 12:20:46.573045 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0930 12:20:46.574462 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:40Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b99e73bb34b117cdb12e2475fc8e7d0ffdcb7fc44a000282e7776473eca209ea\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://24b1bc36a80f3925846e76f49c3347f1e2f14555096b3a514b06c6eb2e8fe02c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://24b1bc36a80f3925846e76f49c3347f1e2f14555096b3a514b06c6eb2e8fe02c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:59Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:59 crc kubenswrapper[5002]: I0930 12:20:59.251385 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://28d9568103a5257388af7a54e676246ab6ee732abd4516bd06e2cd7471f0ec08\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:59Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:59 crc kubenswrapper[5002]: I0930 12:20:59.266697 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97f2c4d225c4e4f2f04337915618c52ced7280fe45dc735fd9b1aca03f95f7f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:20:59Z is after 2025-08-24T17:21:41Z" Sep 30 12:20:59 crc kubenswrapper[5002]: I0930 12:20:59.300625 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:20:59 crc kubenswrapper[5002]: I0930 12:20:59.300739 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:20:59 crc kubenswrapper[5002]: I0930 12:20:59.300759 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:20:59 crc kubenswrapper[5002]: I0930 12:20:59.300785 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:20:59 crc kubenswrapper[5002]: I0930 12:20:59.300806 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:20:59Z","lastTransitionTime":"2025-09-30T12:20:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 12:20:59 crc kubenswrapper[5002]: I0930 12:20:59.404688 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:20:59 crc kubenswrapper[5002]: I0930 12:20:59.404803 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:20:59 crc kubenswrapper[5002]: I0930 12:20:59.404830 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:20:59 crc kubenswrapper[5002]: I0930 12:20:59.404861 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:20:59 crc kubenswrapper[5002]: I0930 12:20:59.404884 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:20:59Z","lastTransitionTime":"2025-09-30T12:20:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:20:59 crc kubenswrapper[5002]: I0930 12:20:59.508325 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:20:59 crc kubenswrapper[5002]: I0930 12:20:59.508388 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:20:59 crc kubenswrapper[5002]: I0930 12:20:59.508405 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:20:59 crc kubenswrapper[5002]: I0930 12:20:59.508431 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:20:59 crc kubenswrapper[5002]: I0930 12:20:59.508449 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:20:59Z","lastTransitionTime":"2025-09-30T12:20:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:20:59 crc kubenswrapper[5002]: I0930 12:20:59.611606 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:20:59 crc kubenswrapper[5002]: I0930 12:20:59.611666 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:20:59 crc kubenswrapper[5002]: I0930 12:20:59.611684 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:20:59 crc kubenswrapper[5002]: I0930 12:20:59.611708 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:20:59 crc kubenswrapper[5002]: I0930 12:20:59.611726 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:20:59Z","lastTransitionTime":"2025-09-30T12:20:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 12:20:59 crc kubenswrapper[5002]: I0930 12:20:59.715277 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:20:59 crc kubenswrapper[5002]: I0930 12:20:59.715340 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:20:59 crc kubenswrapper[5002]: I0930 12:20:59.715356 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:20:59 crc kubenswrapper[5002]: I0930 12:20:59.715378 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:20:59 crc kubenswrapper[5002]: I0930 12:20:59.715395 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:20:59Z","lastTransitionTime":"2025-09-30T12:20:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:20:59 crc kubenswrapper[5002]: I0930 12:20:59.818458 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:20:59 crc kubenswrapper[5002]: I0930 12:20:59.818565 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:20:59 crc kubenswrapper[5002]: I0930 12:20:59.818591 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:20:59 crc kubenswrapper[5002]: I0930 12:20:59.818620 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:20:59 crc kubenswrapper[5002]: I0930 12:20:59.818646 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:20:59Z","lastTransitionTime":"2025-09-30T12:20:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:20:59 crc kubenswrapper[5002]: I0930 12:20:59.921830 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:20:59 crc kubenswrapper[5002]: I0930 12:20:59.921877 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:20:59 crc kubenswrapper[5002]: I0930 12:20:59.921886 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:20:59 crc kubenswrapper[5002]: I0930 12:20:59.921899 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:20:59 crc kubenswrapper[5002]: I0930 12:20:59.921909 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:20:59Z","lastTransitionTime":"2025-09-30T12:20:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 12:21:00 crc kubenswrapper[5002]: I0930 12:21:00.024760 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:00 crc kubenswrapper[5002]: I0930 12:21:00.024799 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:00 crc kubenswrapper[5002]: I0930 12:21:00.024808 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:00 crc kubenswrapper[5002]: I0930 12:21:00.024821 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:00 crc kubenswrapper[5002]: I0930 12:21:00.024831 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:00Z","lastTransitionTime":"2025-09-30T12:21:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:00 crc kubenswrapper[5002]: I0930 12:21:00.126809 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:00 crc kubenswrapper[5002]: I0930 12:21:00.126848 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:00 crc kubenswrapper[5002]: I0930 12:21:00.126859 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:00 crc kubenswrapper[5002]: I0930 12:21:00.126873 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:00 crc kubenswrapper[5002]: I0930 12:21:00.126882 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:00Z","lastTransitionTime":"2025-09-30T12:21:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:00 crc kubenswrapper[5002]: I0930 12:21:00.229221 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:00 crc kubenswrapper[5002]: I0930 12:21:00.229273 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:00 crc kubenswrapper[5002]: I0930 12:21:00.229285 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:00 crc kubenswrapper[5002]: I0930 12:21:00.229301 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:00 crc kubenswrapper[5002]: I0930 12:21:00.229313 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:00Z","lastTransitionTime":"2025-09-30T12:21:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 12:21:00 crc kubenswrapper[5002]: I0930 12:21:00.331528 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:00 crc kubenswrapper[5002]: I0930 12:21:00.331573 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:00 crc kubenswrapper[5002]: I0930 12:21:00.331586 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:00 crc kubenswrapper[5002]: I0930 12:21:00.331600 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:00 crc kubenswrapper[5002]: I0930 12:21:00.331611 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:00Z","lastTransitionTime":"2025-09-30T12:21:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:00 crc kubenswrapper[5002]: I0930 12:21:00.434363 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:00 crc kubenswrapper[5002]: I0930 12:21:00.434404 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:00 crc kubenswrapper[5002]: I0930 12:21:00.434412 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:00 crc kubenswrapper[5002]: I0930 12:21:00.434427 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:00 crc kubenswrapper[5002]: I0930 12:21:00.434436 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:00Z","lastTransitionTime":"2025-09-30T12:21:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:00 crc kubenswrapper[5002]: I0930 12:21:00.537625 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:00 crc kubenswrapper[5002]: I0930 12:21:00.537694 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:00 crc kubenswrapper[5002]: I0930 12:21:00.537719 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:00 crc kubenswrapper[5002]: I0930 12:21:00.537753 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:00 crc kubenswrapper[5002]: I0930 12:21:00.537776 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:00Z","lastTransitionTime":"2025-09-30T12:21:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 12:21:00 crc kubenswrapper[5002]: I0930 12:21:00.640187 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:00 crc kubenswrapper[5002]: I0930 12:21:00.640264 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:00 crc kubenswrapper[5002]: I0930 12:21:00.640274 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:00 crc kubenswrapper[5002]: I0930 12:21:00.640289 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:00 crc kubenswrapper[5002]: I0930 12:21:00.640299 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:00Z","lastTransitionTime":"2025-09-30T12:21:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:00 crc kubenswrapper[5002]: I0930 12:21:00.677687 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 12:21:00 crc kubenswrapper[5002]: E0930 12:21:00.677862 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 12:21:00 crc kubenswrapper[5002]: I0930 12:21:00.678104 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 12:21:00 crc kubenswrapper[5002]: E0930 12:21:00.678212 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 12:21:00 crc kubenswrapper[5002]: I0930 12:21:00.678276 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 12:21:00 crc kubenswrapper[5002]: E0930 12:21:00.678333 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 12:21:00 crc kubenswrapper[5002]: I0930 12:21:00.742805 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:00 crc kubenswrapper[5002]: I0930 12:21:00.742843 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:00 crc kubenswrapper[5002]: I0930 12:21:00.742854 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:00 crc kubenswrapper[5002]: I0930 12:21:00.742870 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:00 crc kubenswrapper[5002]: I0930 12:21:00.742882 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:00Z","lastTransitionTime":"2025-09-30T12:21:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:00 crc kubenswrapper[5002]: I0930 12:21:00.845606 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:00 crc kubenswrapper[5002]: I0930 12:21:00.845670 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:00 crc kubenswrapper[5002]: I0930 12:21:00.845692 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:00 crc kubenswrapper[5002]: I0930 12:21:00.845714 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:00 crc kubenswrapper[5002]: I0930 12:21:00.845730 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:00Z","lastTransitionTime":"2025-09-30T12:21:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 12:21:00 crc kubenswrapper[5002]: I0930 12:21:00.948504 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:00 crc kubenswrapper[5002]: I0930 12:21:00.948585 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:00 crc kubenswrapper[5002]: I0930 12:21:00.948607 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:00 crc kubenswrapper[5002]: I0930 12:21:00.948638 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:00 crc kubenswrapper[5002]: I0930 12:21:00.948660 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:00Z","lastTransitionTime":"2025-09-30T12:21:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:01 crc kubenswrapper[5002]: I0930 12:21:01.051656 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:01 crc kubenswrapper[5002]: I0930 12:21:01.051705 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:01 crc kubenswrapper[5002]: I0930 12:21:01.051719 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:01 crc kubenswrapper[5002]: I0930 12:21:01.051739 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:01 crc kubenswrapper[5002]: I0930 12:21:01.051753 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:01Z","lastTransitionTime":"2025-09-30T12:21:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 12:21:01 crc kubenswrapper[5002]: I0930 12:21:01.052784 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-4pvsr_7095aa7a-d067-4977-bdc5-3a45a52a6a39/ovnkube-controller/0.log" Sep 30 12:21:01 crc kubenswrapper[5002]: I0930 12:21:01.059461 5002 generic.go:334] "Generic (PLEG): container finished" podID="7095aa7a-d067-4977-bdc5-3a45a52a6a39" containerID="95fc7cf1f23c91d4989756ce53322d40c23e54ad75a951dd4c49e23c0593a016" exitCode=1 Sep 30 12:21:01 crc kubenswrapper[5002]: I0930 12:21:01.059525 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4pvsr" event={"ID":"7095aa7a-d067-4977-bdc5-3a45a52a6a39","Type":"ContainerDied","Data":"95fc7cf1f23c91d4989756ce53322d40c23e54ad75a951dd4c49e23c0593a016"} Sep 30 12:21:01 crc kubenswrapper[5002]: I0930 12:21:01.060181 5002 scope.go:117] "RemoveContainer" containerID="95fc7cf1f23c91d4989756ce53322d40c23e54ad75a951dd4c49e23c0593a016" Sep 30 12:21:01 crc kubenswrapper[5002]: I0930 12:21:01.084870 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1d4eefd-8292-45a1-af4b-4a4906cccd2f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21a70de7ac636795f52b3bdd2a2b938494cabcf80e1fb9b321a309f4719edf4e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://240fb1d0e11ae7b3459121ba32094ddfa79dc71a1180befebcaf594859a63d86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"s
tate\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://91de7b5c3b52d03b05c151b66857b5af5f3b9d228f3f1af32a7f504225e1e739\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4cb1aca4e313653c2d83c0d4fb8dfab1e6f8c8220c806cbcdcf154f2d9ab6e29\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ef59c1ec617a61effd91da12779f13866a50fe611d078330f0275e8d6ef1c27e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3a814062d9554255b768b26ad452946c64836707ac0e68c9e3f107dc587c0ccb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3a81406
2d9554255b768b26ad452946c64836707ac0e68c9e3f107dc587c0ccb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://74db2c969d2f9049fd8e8130a9fd50b312f3fce049ab0e814390dbff3b0cb596\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://74db2c969d2f9049fd8e8130a9fd50b312f3fce049ab0e814390dbff3b0cb596\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://a367821c9b4b8799fb789ca39d487c519925da11ebb80e94a0123e02cf78964e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a367821c9b4b8799fb789ca39d487c519925da11ebb80e94a0123e02cf78964e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:26Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:01Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:01 crc kubenswrapper[5002]: I0930 12:21:01.100711 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1fe2fccc-790d-41b7-a322-0f99dbd9b3e2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a46e01897629261ad42fc3fa2cc7fdf56a2a020864a1088d9e58edf61aac296e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://017ebbf00a59d33f36e3515dd280bc0c4363da8b3e621339185ebdb3c8728148\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ee0dc371a43f3780e1178e29ac491c7a61786fccb37527dd2f21d800adbbdae\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://49c139b2c0ae558d108d47a8452c2a8348fc164761d468e1b9b98ff2c7407f20\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\
\":\\\"cri-o://49c139b2c0ae558d108d47a8452c2a8348fc164761d468e1b9b98ff2c7407f20\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"message\\\":\\\"le observer\\\\nW0930 12:20:46.369838 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0930 12:20:46.370037 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 12:20:46.371113 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1995022772/tls.crt::/tmp/serving-cert-1995022772/tls.key\\\\\\\"\\\\nI0930 12:20:46.549991 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0930 12:20:46.558658 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0930 12:20:46.558692 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0930 12:20:46.558723 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0930 12:20:46.558735 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0930 12:20:46.572632 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0930 12:20:46.572689 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 12:20:46.572707 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 12:20:46.572718 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0930 12:20:46.572725 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0930 12:20:46.572737 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0930 12:20:46.572744 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0930 12:20:46.573045 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0930 12:20:46.574462 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:40Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b99e73bb34b117cdb12e2475fc8e7d0ffdcb7fc44a000282e7776473eca209ea\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://24b1bc36a80f3925846e76f49c3347f1e2f14555096b3a514b06c6eb2e8fe02c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://24b1bc36a80f3925846e76f49c3347f1e2f14555096b3a514b06c6eb2e8fe02c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:01Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:01 crc kubenswrapper[5002]: I0930 12:21:01.114135 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://28d9568103a5257388af7a54e676246ab6ee732abd4516bd06e2cd7471f0ec08\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:01Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:01 crc kubenswrapper[5002]: I0930 12:21:01.128766 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97f2c4d225c4e4f2f04337915618c52ced7280fe45dc735fd9b1aca03f95f7f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:01Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:01 crc kubenswrapper[5002]: I0930 12:21:01.146408 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:01Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:01 crc kubenswrapper[5002]: I0930 12:21:01.155566 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:01 crc kubenswrapper[5002]: I0930 12:21:01.155626 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:01 crc kubenswrapper[5002]: I0930 12:21:01.155637 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:01 crc kubenswrapper[5002]: I0930 12:21:01.155651 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:01 crc kubenswrapper[5002]: I0930 12:21:01.155660 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:01Z","lastTransitionTime":"2025-09-30T12:21:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 12:21:01 crc kubenswrapper[5002]: I0930 12:21:01.161746 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"90d5183f-90e1-421f-bd64-fd7f05605586\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d2e0f074c0f31a5432cea24faa4f95c276f59cdf48bca8ceecd065b4cdff5d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://902cc950dc462d86c9de62cac5d04e99aa3240952f43e8a5f07884e70ff84b56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://047c48f1e175cddc7dc981a8d9027783f4a90c21cb444050771de50fb02e7a9e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f45ac789dc45e907d68fe54ec7bc2ea72dbcc5f46cd55c113c014203d5e709d9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:01Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:01 crc kubenswrapper[5002]: I0930 12:21:01.172112 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-ls6n9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2ebe21b2-eb85-4b30-b911-78f6619d07f9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e83e79da4b7b8f89a61048c7583c9ff7ccb7ac910744dfbb572c189c5676741\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-frsmm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:52Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-ls6n9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:01Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:01 crc kubenswrapper[5002]: I0930 12:21:01.187592 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://30f55f9a0bcab13420179c62f63cd5e785afe964ac86286450fcd91d7ca0562e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://306dd7840789c5fe2565d819a744a57e330cb1fcc1ab1ca4b3c5ebcfd0361d3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:01Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:01 crc kubenswrapper[5002]: I0930 12:21:01.203328 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:01Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:01 crc kubenswrapper[5002]: I0930 12:21:01.216216 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-ttfn8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2bd9f18d-bfb3-4bd7-9a87-242029cd3200\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0809a587f4b73c4a5e632e8db2cc0d4a957eded6da9a771cf0b53e4862de54e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lvf2n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-ttfn8\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:01Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:01 crc kubenswrapper[5002]: I0930 12:21:01.231426 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"341a55c6-78d3-4fa2-8f47-b56fd41fa1c1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7bc40360d8d17f580c7ded1e33762aa443998e4202892e2f2effc08b6659a4a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbwzb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6868404a7219f8dbbc4e7541e5f80402c169808daea81b5f94546472cb8e70a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbwzb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:50Z\\\"}}\" for pod 
\"openshift-machine-config-operator\"/\"machine-config-daemon-ncbb5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:01Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:01 crc kubenswrapper[5002]: I0930 12:21:01.255109 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4pvsr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7095aa7a-d067-4977-bdc5-3a45a52a6a39\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b95267743f525eb5c8712304a1dc7afe4403b2065c0a3b594d95dcba6348170\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://89dbc048c1483376e11a6754384e9ea154767a88f7dfb3463809a2554bb142b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\
\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d41b04f928f3b4423bf4aa1babfd6292b749752d31ac57771b36bed503312d3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ceb946c94ca247c05c57001754a73122b1bb98a98d886df9d64b2e4a003a4cf6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfbe7b62a4f0a00896ac4608d70c4ef6dc7675e0312f85ca181fcf1087fe73e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-acce
ss-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a65c3c1af3f1ee07c2f5350011d1f5eb61dfe0b21ff13d7326373d65ffe603c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://95fc7cf1f23c91d4989756ce53322d40c23e54ad75a951dd4c49e23c0593a016\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://95fc7cf1f23c91d4989756ce53322d40c23e54ad75a951dd4c49e23c0593a016\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T12:21:00Z\\\",\\\"message\\\":\\\"alversions/factory.go:140\\\\nI0930 12:21:00.693904 6350 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0930 12:21:00.694464 6350 reflector.go:311] Stopping reflector *v1.NetworkAttachmentDefinition (0s) from github.com/k8snetworkplumbingwg/network-attachment-definition-client/pkg/client/informers/externalversions/factory.go:117\\\\nI0930 12:21:00.694998 6350 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0930 12:21:00.695035 6350 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0930 12:21:00.695058 6350 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0930 12:21:00.695066 6350 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0930 12:21:00.695088 6350 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0930 12:21:00.695112 6350 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0930 12:21:00.695123 6350 factory.go:656] Stopping watch factory\\\\nI0930 12:21:00.695132 6350 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0930 12:21:00.695134 6350 ovnkube.go:599] Stopped ovnkube\\\\nI0930 12:21:00.695144 6350 handler.go:208] Removed 
*v1.EgressFirewall event handler 9\\\\nI0930 12:21:00.695154 6350 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0930 1\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ce6227a7ac3ced1fbd4814039859e25ff52f314aa8479275f1bfc49b04cf411\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29ad21e0858ff157cc785f60f949fb0
8082938850870cc2c5198447f1e1d9253\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://29ad21e0858ff157cc785f60f949fb08082938850870cc2c5198447f1e1d9253\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:50Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4pvsr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:01Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:01 crc kubenswrapper[5002]: I0930 12:21:01.261661 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:01 crc kubenswrapper[5002]: I0930 12:21:01.261688 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:01 crc kubenswrapper[5002]: I0930 12:21:01.261696 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:01 crc kubenswrapper[5002]: I0930 12:21:01.261709 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:01 crc kubenswrapper[5002]: I0930 12:21:01.261718 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:01Z","lastTransitionTime":"2025-09-30T12:21:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 12:21:01 crc kubenswrapper[5002]: I0930 12:21:01.267187 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:01Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:01 crc kubenswrapper[5002]: I0930 12:21:01.276778 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-p45pn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d0334cb8-cf38-4e50-80e8-2b81d8d46ae7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca035599120eed38b0abc1893076e4d1ecab6b1ce53605f53ff30fa2782b63de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2nvm4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:49Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-p45pn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:01Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:01 crc kubenswrapper[5002]: I0930 12:21:01.289547 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-2lqq2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7b7df259-4156-484c-bedd-543ca42f2970\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3e10fed923eacb0c5409835ec689ec74b4ef68b770774b3fe03e05db05d63c04\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6865c45580d6dc6acaf6dede9483213728b45f602a6df3a6d9264bbec520db66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6865c45580d6dc6acaf6dede9483213728b45f602a6df3a6d9264bbec520db66\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://042d2758098735539d73ee03bcaf5bfaaa765ef2337975341c63511bff33e4ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://042d2758098735539d73ee03bcaf5bfaaa765ef2337975341c63511bff33e4ec\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://037e6837e485e2662c487cea7b28bd28b85c3aa2dc698edd3d16b4269907dd83\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://037e6837e485e2662c487cea7b28bd28b85c3aa2dc698edd3d16b4269907dd83\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2a451f8cf7baf508ad6ceda7f19f3117804e5698e678bedf8e4187f60847aaac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2a451f8cf7baf508ad6ceda7f19f3117804e5698e678bedf8e4187f60847aaac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://edf6916be3680c65376e28fb893a869599eb07d870ac5337a38fed83b4dbf28d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://edf6916be3680c65376e28fb893a869599eb07d870ac5337a38fed83b4dbf28d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7019df241ab90e98201c474287f02919d99bca60c432a8f61ac68d29ce53bed8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7019df241ab90e98201c474287f02919d99bca60c432a8f61ac68d29ce53bed8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-2lqq2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:01Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:01 crc kubenswrapper[5002]: I0930 12:21:01.363800 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:01 crc kubenswrapper[5002]: I0930 12:21:01.363853 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:01 crc 
kubenswrapper[5002]: I0930 12:21:01.363865 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:01 crc kubenswrapper[5002]: I0930 12:21:01.363881 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:01 crc kubenswrapper[5002]: I0930 12:21:01.363893 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:01Z","lastTransitionTime":"2025-09-30T12:21:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:01 crc kubenswrapper[5002]: I0930 12:21:01.468269 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:01 crc kubenswrapper[5002]: I0930 12:21:01.468310 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:01 crc kubenswrapper[5002]: I0930 12:21:01.468319 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:01 crc kubenswrapper[5002]: I0930 12:21:01.468333 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:01 crc kubenswrapper[5002]: I0930 12:21:01.468341 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:01Z","lastTransitionTime":"2025-09-30T12:21:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:01 crc kubenswrapper[5002]: I0930 12:21:01.570669 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:01 crc kubenswrapper[5002]: I0930 12:21:01.570708 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:01 crc kubenswrapper[5002]: I0930 12:21:01.570718 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:01 crc kubenswrapper[5002]: I0930 12:21:01.570733 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:01 crc kubenswrapper[5002]: I0930 12:21:01.570742 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:01Z","lastTransitionTime":"2025-09-30T12:21:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 12:21:01 crc kubenswrapper[5002]: I0930 12:21:01.672901 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:01 crc kubenswrapper[5002]: I0930 12:21:01.672930 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:01 crc kubenswrapper[5002]: I0930 12:21:01.672938 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:01 crc kubenswrapper[5002]: I0930 12:21:01.672950 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:01 crc kubenswrapper[5002]: I0930 12:21:01.672958 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:01Z","lastTransitionTime":"2025-09-30T12:21:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:01 crc kubenswrapper[5002]: I0930 12:21:01.775226 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:01 crc kubenswrapper[5002]: I0930 12:21:01.775268 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:01 crc kubenswrapper[5002]: I0930 12:21:01.775280 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:01 crc kubenswrapper[5002]: I0930 12:21:01.775295 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:01 crc kubenswrapper[5002]: I0930 12:21:01.775307 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:01Z","lastTransitionTime":"2025-09-30T12:21:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:01 crc kubenswrapper[5002]: I0930 12:21:01.877220 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:01 crc kubenswrapper[5002]: I0930 12:21:01.877245 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:01 crc kubenswrapper[5002]: I0930 12:21:01.877254 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:01 crc kubenswrapper[5002]: I0930 12:21:01.877265 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:01 crc kubenswrapper[5002]: I0930 12:21:01.877274 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:01Z","lastTransitionTime":"2025-09-30T12:21:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 12:21:01 crc kubenswrapper[5002]: I0930 12:21:01.979222 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:01 crc kubenswrapper[5002]: I0930 12:21:01.979288 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:01 crc kubenswrapper[5002]: I0930 12:21:01.979297 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:01 crc kubenswrapper[5002]: I0930 12:21:01.979311 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:01 crc kubenswrapper[5002]: I0930 12:21:01.979320 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:01Z","lastTransitionTime":"2025-09-30T12:21:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:02 crc kubenswrapper[5002]: I0930 12:21:02.066293 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-4pvsr_7095aa7a-d067-4977-bdc5-3a45a52a6a39/ovnkube-controller/0.log" Sep 30 12:21:02 crc kubenswrapper[5002]: I0930 12:21:02.071601 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4pvsr" event={"ID":"7095aa7a-d067-4977-bdc5-3a45a52a6a39","Type":"ContainerStarted","Data":"8fc2e06caaa38408efd9783fa165bede103d30f2b1d556762d633a3ad3bd3436"} Sep 30 12:21:02 crc kubenswrapper[5002]: I0930 12:21:02.072281 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-4pvsr" Sep 30 12:21:02 crc kubenswrapper[5002]: I0930 12:21:02.081942 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:02 crc kubenswrapper[5002]: I0930 12:21:02.081972 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:02 crc kubenswrapper[5002]: I0930 12:21:02.081980 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:02 crc kubenswrapper[5002]: I0930 12:21:02.081993 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:02 crc kubenswrapper[5002]: I0930 12:21:02.082013 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:02Z","lastTransitionTime":"2025-09-30T12:21:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 12:21:02 crc kubenswrapper[5002]: I0930 12:21:02.093308 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"90d5183f-90e1-421f-bd64-fd7f05605586\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d2e0f074c0f31a5432cea24faa4f95c276f59cdf48bca8ceecd065b4cdff5d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://902cc950dc462d86c9de62cac5d04e99aa3240952f43e8a5f07884e70ff84b56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://047c48f1e175cddc7dc981a8d9027783f4a90c21cb444050771de50fb02e7a9e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f45ac789dc45e907d68fe54ec7bc2ea72dbcc5f46cd55c113c014203d5e709d9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:02Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:02 crc kubenswrapper[5002]: I0930 12:21:02.108694 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-ls6n9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2ebe21b2-eb85-4b30-b911-78f6619d07f9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e83e79da4b7b8f89a61048c7583c9ff7ccb7ac910744dfbb572c189c5676741\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-frsmm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:52Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-ls6n9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:02Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:02 crc kubenswrapper[5002]: I0930 12:21:02.132504 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://30f55f9a0bcab13420179c62f63cd5e785afe964ac86286450fcd91d7ca0562e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://306dd7840789c5fe2565d819a744a57e330cb1fcc1ab1ca4b3c5ebcfd0361d3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:02Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:02 crc kubenswrapper[5002]: I0930 12:21:02.152656 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:02Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:02 crc kubenswrapper[5002]: I0930 12:21:02.173635 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-ttfn8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2bd9f18d-bfb3-4bd7-9a87-242029cd3200\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0809a587f4b73c4a5e632e8db2cc0d4a957eded6da9a771cf0b53e4862de54e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lvf2n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-ttfn8\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:02Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:02 crc kubenswrapper[5002]: I0930 12:21:02.184869 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:02 crc kubenswrapper[5002]: I0930 12:21:02.184954 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:02 crc kubenswrapper[5002]: I0930 12:21:02.184981 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:02 crc kubenswrapper[5002]: I0930 12:21:02.185016 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:02 crc kubenswrapper[5002]: I0930 12:21:02.185041 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:02Z","lastTransitionTime":"2025-09-30T12:21:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:02 crc kubenswrapper[5002]: I0930 12:21:02.186454 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"341a55c6-78d3-4fa2-8f47-b56fd41fa1c1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7bc40360d8d17f580c7ded1e33762aa443998e4202892e2f2effc08b6659a4a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbwzb\\\",\\\"readOnly\\\":true,\
\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6868404a7219f8dbbc4e7541e5f80402c169808daea81b5f94546472cb8e70a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbwzb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:50Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-ncbb5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:02Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:02 crc kubenswrapper[5002]: I0930 12:21:02.219304 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4pvsr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7095aa7a-d067-4977-bdc5-3a45a52a6a39\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b95267743f525eb5c8712304a1dc7afe4403b2065c0a3b594d95dcba6348170\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://89dbc048c1483376e11a6754384e9ea154767a88f7dfb3463809a2554bb142b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d41b04f928f3b4423bf4aa1babfd6292b749752d31ac57771b36bed503312d3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ceb946c94ca247c05c57001754a73122b1bb98a98d886df9d64b2e4a003a4cf6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfbe7b62a4f0a00896ac4608d70c4ef6dc7675e0312f85ca181fcf1087fe73e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a65c3c1af3f1ee07c2f5350011d1f5eb61dfe0b21ff13d7326373d65ffe603c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8fc2e06caaa38408efd9783fa165bede103d30f2
b1d556762d633a3ad3bd3436\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://95fc7cf1f23c91d4989756ce53322d40c23e54ad75a951dd4c49e23c0593a016\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T12:21:00Z\\\",\\\"message\\\":\\\"alversions/factory.go:140\\\\nI0930 12:21:00.693904 6350 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0930 12:21:00.694464 6350 reflector.go:311] Stopping reflector *v1.NetworkAttachmentDefinition (0s) from github.com/k8snetworkplumbingwg/network-attachment-definition-client/pkg/client/informers/externalversions/factory.go:117\\\\nI0930 12:21:00.694998 6350 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0930 12:21:00.695035 6350 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0930 12:21:00.695058 6350 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0930 12:21:00.695066 6350 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0930 12:21:00.695088 6350 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0930 12:21:00.695112 6350 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0930 12:21:00.695123 6350 factory.go:656] Stopping watch factory\\\\nI0930 12:21:00.695132 6350 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0930 12:21:00.695134 6350 ovnkube.go:599] Stopped ovnkube\\\\nI0930 12:21:00.695144 6350 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0930 12:21:00.695154 6350 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0930 
1\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:57Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:21:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ce6227a7ac3ced1fbd4814039859e25ff52f314aa8479275f1bfc49b04cf411\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"cont
ainerID\\\":\\\"cri-o://29ad21e0858ff157cc785f60f949fb08082938850870cc2c5198447f1e1d9253\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://29ad21e0858ff157cc785f60f949fb08082938850870cc2c5198447f1e1d9253\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:50Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4pvsr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:02Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:02 crc kubenswrapper[5002]: I0930 12:21:02.234459 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:02Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:02 crc kubenswrapper[5002]: I0930 12:21:02.248529 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-p45pn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d0334cb8-cf38-4e50-80e8-2b81d8d46ae7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca035599120eed38b0abc1893076e4d1ecab6b1ce53605f53ff30fa2782b63de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2nvm4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:49Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-p45pn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-09-30T12:21:02Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:02 crc kubenswrapper[5002]: I0930 12:21:02.273609 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-2lqq2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7b7df259-4156-484c-bedd-543ca42f2970\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3e10fed923eacb0c5409835ec689ec74b4ef68b770774b3fe03e05db05d63c04\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6865c45580d6dc6acaf6dede9483213728b45f602a6df3a6d9264bbec520db66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6865c45580d6dc6acaf6dede9483213728b45f602a6df3a6d9264bbec520db66\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://042d2758098735539d73ee03bcaf5bfaaa765ef2337975341c
63511bff33e4ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://042d2758098735539d73ee03bcaf5bfaaa765ef2337975341c63511bff33e4ec\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://037e6837e485e2662c487cea7b28bd28b85c3aa2dc698edd3d16b4269907dd83\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://037e6837e485e2662c487cea7b28bd28b85c3aa2dc698edd3d16b4269907dd83\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2a451f8cf7baf508ad6ceda7f19f3117804e5698e678bedf8e4187f60847aaac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2a451f8cf7baf508ad6ceda7f19f3117804e5698e678bedf8e4187f60847aaac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-
copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://edf6916be3680c65376e28fb893a869599eb07d870ac5337a38fed83b4dbf28d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://edf6916be3680c65376e28fb893a869599eb07d870ac5337a38fed83b4dbf28d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7019df241ab90e98201c474287f02919d99bca60c432a8f61ac68d29ce53bed8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7019df241ab90e98201c474287f02919d99bca60c432a8f61ac68d29ce53bed8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-2lqq2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:02Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:02 crc kubenswrapper[5002]: I0930 12:21:02.288169 5002 kubelet_node_status.go:724] "Recording event message 
for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:02 crc kubenswrapper[5002]: I0930 12:21:02.288330 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:02 crc kubenswrapper[5002]: I0930 12:21:02.288409 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:02 crc kubenswrapper[5002]: I0930 12:21:02.288540 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:02 crc kubenswrapper[5002]: I0930 12:21:02.288624 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:02Z","lastTransitionTime":"2025-09-30T12:21:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:02 crc kubenswrapper[5002]: I0930 12:21:02.301319 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1d4eefd-8292-45a1-af4b-4a4906cccd2f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21a70de7ac636795f52b3bdd2a2b938494cabcf80e1fb9b321a309f4719edf4e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://240fb1d0e11ae7b3459121ba32094ddfa79dc71a1180befebcaf594859a63d86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"resta
rtCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://91de7b5c3b52d03b05c151b66857b5af5f3b9d228f3f1af32a7f504225e1e739\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4cb1aca4e313653c2d83c0d4fb8dfab1e6f8c8220c806cbcdcf154f2d9ab6e29\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ef59c1ec617a61effd91da12779f13866a50fe611d078330f0275e8d6ef1c27e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3a814062d9554255b768b26ad452946c64836707ac0e68c9e3f107dc587c0ccb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":
{\\\"containerID\\\":\\\"cri-o://3a814062d9554255b768b26ad452946c64836707ac0e68c9e3f107dc587c0ccb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://74db2c969d2f9049fd8e8130a9fd50b312f3fce049ab0e814390dbff3b0cb596\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://74db2c969d2f9049fd8e8130a9fd50b312f3fce049ab0e814390dbff3b0cb596\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://a367821c9b4b8799fb789ca39d487c519925da11ebb80e94a0123e02cf78964e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a367821c9b4b8799fb789ca39d487c519925da11ebb80e94a0123e02cf78964e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:26Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:02Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:02 crc kubenswrapper[5002]: I0930 12:21:02.319796 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1fe2fccc-790d-41b7-a322-0f99dbd9b3e2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a46e01897629261ad42fc3fa2cc7fdf56a2a020864a1088d9e58edf61aac296e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://017ebbf00a59d33f36e3515dd280bc0c4363da8b3e621339185ebdb3c8728148\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ee0dc371a43f3780e1178e29ac491c7a61786fccb37527dd2f21d800adbbdae\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://49c139b2c0ae558d108d47a8452c2a8348fc164761d468e1b9b98ff2c7407f20\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\
\":\\\"cri-o://49c139b2c0ae558d108d47a8452c2a8348fc164761d468e1b9b98ff2c7407f20\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"message\\\":\\\"le observer\\\\nW0930 12:20:46.369838 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0930 12:20:46.370037 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 12:20:46.371113 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1995022772/tls.crt::/tmp/serving-cert-1995022772/tls.key\\\\\\\"\\\\nI0930 12:20:46.549991 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0930 12:20:46.558658 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0930 12:20:46.558692 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0930 12:20:46.558723 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0930 12:20:46.558735 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0930 12:20:46.572632 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0930 12:20:46.572689 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 12:20:46.572707 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 12:20:46.572718 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0930 12:20:46.572725 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0930 12:20:46.572737 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0930 12:20:46.572744 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0930 12:20:46.573045 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0930 12:20:46.574462 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:40Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b99e73bb34b117cdb12e2475fc8e7d0ffdcb7fc44a000282e7776473eca209ea\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://24b1bc36a80f3925846e76f49c3347f1e2f14555096b3a514b06c6eb2e8fe02c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://24b1bc36a80f3925846e76f49c3347f1e2f14555096b3a514b06c6eb2e8fe02c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:02Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:02 crc kubenswrapper[5002]: I0930 12:21:02.343075 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://28d9568103a5257388af7a54e676246ab6ee732abd4516bd06e2cd7471f0ec08\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:02Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:02 crc kubenswrapper[5002]: I0930 12:21:02.359461 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97f2c4d225c4e4f2f04337915618c52ced7280fe45dc735fd9b1aca03f95f7f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:02Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:02 crc kubenswrapper[5002]: I0930 12:21:02.378140 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:02Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:02 crc kubenswrapper[5002]: I0930 12:21:02.391337 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:02 crc kubenswrapper[5002]: I0930 12:21:02.391385 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:02 crc kubenswrapper[5002]: I0930 12:21:02.391404 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:02 crc kubenswrapper[5002]: I0930 12:21:02.391427 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:02 crc kubenswrapper[5002]: I0930 12:21:02.391444 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:02Z","lastTransitionTime":"2025-09-30T12:21:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:02 crc kubenswrapper[5002]: I0930 12:21:02.494825 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:02 crc kubenswrapper[5002]: I0930 12:21:02.495043 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:02 crc kubenswrapper[5002]: I0930 12:21:02.495103 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:02 crc kubenswrapper[5002]: I0930 12:21:02.495175 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:02 crc kubenswrapper[5002]: I0930 12:21:02.495243 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:02Z","lastTransitionTime":"2025-09-30T12:21:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 12:21:02 crc kubenswrapper[5002]: I0930 12:21:02.508538 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:02 crc kubenswrapper[5002]: I0930 12:21:02.508612 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:02 crc kubenswrapper[5002]: I0930 12:21:02.508631 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:02 crc kubenswrapper[5002]: I0930 12:21:02.508651 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:02 crc kubenswrapper[5002]: I0930 12:21:02.508666 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:02Z","lastTransitionTime":"2025-09-30T12:21:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:02 crc kubenswrapper[5002]: E0930 12:21:02.529187 5002 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T12:21:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:02Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T12:21:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:02Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T12:21:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:02Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T12:21:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:02Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"2268e3e5-9437-4733-80ec-6085372d1c27\\\",\\\"systemUUID\\\":\\\"2730bf9d-d559-45ca-96f1-192133954467\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:02Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:02 crc kubenswrapper[5002]: I0930 12:21:02.534358 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:02 crc kubenswrapper[5002]: I0930 12:21:02.534417 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 30 12:21:02 crc kubenswrapper[5002]: I0930 12:21:02.534433 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:02 crc kubenswrapper[5002]: I0930 12:21:02.534455 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:02 crc kubenswrapper[5002]: I0930 12:21:02.534499 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:02Z","lastTransitionTime":"2025-09-30T12:21:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:02 crc kubenswrapper[5002]: E0930 12:21:02.552786 5002 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T12:21:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:02Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T12:21:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:02Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T12:21:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:02Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T12:21:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:02Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"2268e3e5-9437-4733-80ec-6085372d1c27\\\",\\\"systemUUID\\\":\\\"2730bf9d-d559-45ca-96f1-192133954467\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:02Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:02 crc kubenswrapper[5002]: I0930 12:21:02.558700 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:02 crc kubenswrapper[5002]: I0930 12:21:02.558860 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 30 12:21:02 crc kubenswrapper[5002]: I0930 12:21:02.559023 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:02 crc kubenswrapper[5002]: I0930 12:21:02.559186 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:02 crc kubenswrapper[5002]: I0930 12:21:02.559320 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:02Z","lastTransitionTime":"2025-09-30T12:21:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:02 crc kubenswrapper[5002]: E0930 12:21:02.579506 5002 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T12:21:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:02Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T12:21:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:02Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T12:21:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:02Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T12:21:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:02Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"2268e3e5-9437-4733-80ec-6085372d1c27\\\",\\\"systemUUID\\\":\\\"2730bf9d-d559-45ca-96f1-192133954467\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:02Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:02 crc kubenswrapper[5002]: I0930 12:21:02.584305 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:02 crc kubenswrapper[5002]: I0930 12:21:02.584363 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 30 12:21:02 crc kubenswrapper[5002]: I0930 12:21:02.584380 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:02 crc kubenswrapper[5002]: I0930 12:21:02.584405 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:02 crc kubenswrapper[5002]: I0930 12:21:02.584422 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:02Z","lastTransitionTime":"2025-09-30T12:21:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:02 crc kubenswrapper[5002]: E0930 12:21:02.600789 5002 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T12:21:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:02Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T12:21:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:02Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T12:21:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:02Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T12:21:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:02Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"2268e3e5-9437-4733-80ec-6085372d1c27\\\",\\\"systemUUID\\\":\\\"2730bf9d-d559-45ca-96f1-192133954467\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:02Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:02 crc kubenswrapper[5002]: I0930 12:21:02.605798 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:02 crc kubenswrapper[5002]: I0930 12:21:02.605870 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 30 12:21:02 crc kubenswrapper[5002]: I0930 12:21:02.605896 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:02 crc kubenswrapper[5002]: I0930 12:21:02.605932 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:02 crc kubenswrapper[5002]: I0930 12:21:02.605957 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:02Z","lastTransitionTime":"2025-09-30T12:21:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:02 crc kubenswrapper[5002]: E0930 12:21:02.622078 5002 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T12:21:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:02Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T12:21:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:02Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T12:21:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:02Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T12:21:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:02Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"2268e3e5-9437-4733-80ec-6085372d1c27\\\",\\\"systemUUID\\\":\\\"2730bf9d-d559-45ca-96f1-192133954467\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:02Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:02 crc kubenswrapper[5002]: E0930 12:21:02.622271 5002 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Sep 30 12:21:02 crc kubenswrapper[5002]: I0930 12:21:02.624499 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Sep 30 12:21:02 crc kubenswrapper[5002]: I0930 12:21:02.624540 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:02 crc kubenswrapper[5002]: I0930 12:21:02.624550 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:02 crc kubenswrapper[5002]: I0930 12:21:02.624566 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:02 crc kubenswrapper[5002]: I0930 12:21:02.624576 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:02Z","lastTransitionTime":"2025-09-30T12:21:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:02 crc kubenswrapper[5002]: I0930 12:21:02.662822 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 12:21:02 crc kubenswrapper[5002]: I0930 12:21:02.662923 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 12:21:02 crc kubenswrapper[5002]: I0930 12:21:02.662981 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 12:21:02 crc kubenswrapper[5002]: E0930 12:21:02.663036 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 12:21:18.663005408 +0000 UTC m=+52.912687564 (durationBeforeRetry 16s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 12:21:02 crc kubenswrapper[5002]: I0930 12:21:02.663100 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 12:21:02 crc kubenswrapper[5002]: I0930 12:21:02.663147 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 12:21:02 crc kubenswrapper[5002]: E0930 12:21:02.663152 5002 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Sep 30 12:21:02 crc kubenswrapper[5002]: E0930 12:21:02.663181 5002 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Sep 30 12:21:02 crc kubenswrapper[5002]: E0930 12:21:02.663185 5002 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Sep 30 12:21:02 crc kubenswrapper[5002]: E0930 12:21:02.663237 5002 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Sep 30 12:21:02 crc kubenswrapper[5002]: E0930 12:21:02.663254 5002 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 30 12:21:02 crc kubenswrapper[5002]: E0930 12:21:02.663270 5002 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Sep 30 12:21:02 crc kubenswrapper[5002]: E0930 12:21:02.663300 5002 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Sep 30 12:21:02 crc kubenswrapper[5002]: E0930 12:21:02.663317 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-09-30 12:21:18.663295886 +0000 UTC m=+52.912978092 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 30 12:21:02 crc kubenswrapper[5002]: E0930 12:21:02.663203 5002 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 30 12:21:02 crc kubenswrapper[5002]: E0930 12:21:02.663342 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-30 12:21:18.663331957 +0000 UTC m=+52.913014203 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Sep 30 12:21:02 crc kubenswrapper[5002]: E0930 12:21:02.663358 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-30 12:21:18.663350568 +0000 UTC m=+52.913032844 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Sep 30 12:21:02 crc kubenswrapper[5002]: E0930 12:21:02.663373 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-09-30 12:21:18.663365288 +0000 UTC m=+52.913047554 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 30 12:21:02 crc kubenswrapper[5002]: I0930 12:21:02.675510 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 12:21:02 crc kubenswrapper[5002]: I0930 12:21:02.675523 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 12:21:02 crc kubenswrapper[5002]: I0930 12:21:02.675610 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 12:21:02 crc kubenswrapper[5002]: E0930 12:21:02.675737 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 12:21:02 crc kubenswrapper[5002]: E0930 12:21:02.675951 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 12:21:02 crc kubenswrapper[5002]: E0930 12:21:02.676036 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 12:21:02 crc kubenswrapper[5002]: I0930 12:21:02.727355 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:02 crc kubenswrapper[5002]: I0930 12:21:02.727423 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:02 crc kubenswrapper[5002]: I0930 12:21:02.727435 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:02 crc kubenswrapper[5002]: I0930 12:21:02.727453 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:02 crc kubenswrapper[5002]: I0930 12:21:02.727464 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:02Z","lastTransitionTime":"2025-09-30T12:21:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 12:21:02 crc kubenswrapper[5002]: I0930 12:21:02.830093 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:02 crc kubenswrapper[5002]: I0930 12:21:02.830171 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:02 crc kubenswrapper[5002]: I0930 12:21:02.830189 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:02 crc kubenswrapper[5002]: I0930 12:21:02.830213 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:02 crc kubenswrapper[5002]: I0930 12:21:02.830230 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:02Z","lastTransitionTime":"2025-09-30T12:21:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:02 crc kubenswrapper[5002]: I0930 12:21:02.894713 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hvvbg"] Sep 30 12:21:02 crc kubenswrapper[5002]: I0930 12:21:02.895248 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hvvbg" Sep 30 12:21:02 crc kubenswrapper[5002]: I0930 12:21:02.900468 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd" Sep 30 12:21:02 crc kubenswrapper[5002]: I0930 12:21:02.900709 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert" Sep 30 12:21:02 crc kubenswrapper[5002]: I0930 12:21:02.929080 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1d4eefd-8292-45a1-af4b-4a4906cccd2f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21a70de7ac636795f52b3bdd2a2b938494cabcf80e1fb9b321a309f4719edf4e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://240fb1d0e11ae7b3459121ba32094ddfa79dc71a1180befebcaf594859a63d86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://91de7b5c3b52d03b05c151b66857b5af5f3b9d228f3f1af32a7f504225e1e739\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4cb1aca4e313653c2d83c0d4fb8dfab1e6f8c82
20c806cbcdcf154f2d9ab6e29\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ef59c1ec617a61effd91da12779f13866a50fe611d078330f0275e8d6ef1c27e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3a814062d9554255b768b26ad452946c64836707ac0e68c9e3f107dc587c0ccb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3a814062d9554255b768b26ad452946c64836707ac0e68c9e3f107dc587c0ccb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://74db2c969d2f9049fd8e8130a9fd50b312f3fce049ab0e814390dbff3b0cb596\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://74db2c969d2f9049fd8e8130a9fd50b312f3fce049ab0e814390dbff3b0cb596\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://a367821c9b4b8799fb789ca39d487c519925da11ebb80e94a0123e02cf78964e\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a367821c9b4b8799fb789ca39d487c519925da11ebb80e94a0123e02cf78964e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:26Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:02Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:02 crc kubenswrapper[5002]: I0930 12:21:02.932739 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:02 crc kubenswrapper[5002]: I0930 12:21:02.932957 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:02 crc kubenswrapper[5002]: I0930 12:21:02.933081 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:02 crc kubenswrapper[5002]: I0930 12:21:02.933239 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:02 crc kubenswrapper[5002]: I0930 12:21:02.933367 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:02Z","lastTransitionTime":"2025-09-30T12:21:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 12:21:02 crc kubenswrapper[5002]: I0930 12:21:02.951854 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1fe2fccc-790d-41b7-a322-0f99dbd9b3e2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a46e01897629261ad42fc3fa2cc7fdf56a2a020864a1088d9e58edf61aac296e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://017ebbf00a59d33f36e3515dd280bc0c4363da8b3e621339185ebdb3c8728148\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ee0dc371a43f3780e1178e29ac491c7a61786fccb37527dd2f21d800adbbdae\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartC
ount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://49c139b2c0ae558d108d47a8452c2a8348fc164761d468e1b9b98ff2c7407f20\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://49c139b2c0ae558d108d47a8452c2a8348fc164761d468e1b9b98ff2c7407f20\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"message\\\":\\\"le observer\\\\nW0930 12:20:46.369838 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0930 12:20:46.370037 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 12:20:46.371113 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1995022772/tls.crt::/tmp/serving-cert-1995022772/tls.key\\\\\\\"\\\\nI0930 12:20:46.549991 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0930 12:20:46.558658 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0930 12:20:46.558692 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0930 12:20:46.558723 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0930 12:20:46.558735 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0930 12:20:46.572632 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0930 12:20:46.572689 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 12:20:46.572707 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 12:20:46.572718 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0930 12:20:46.572725 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0930 12:20:46.572737 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0930 12:20:46.572744 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0930 12:20:46.573045 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0930 12:20:46.574462 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:40Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b99e73bb34b117cdb12e2475fc8e7d0ffdcb7fc44a000282e7776473eca209ea\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://24b1bc36a80f3925846e76f49c3347f1e2f14555096b3a514b06c6eb2e8fe02c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://24b1bc36a80f3925846e76f49c3347f1e2f14555096b3a514b06c6eb2e8fe02c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:02Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:02 crc kubenswrapper[5002]: I0930 12:21:02.966051 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ffrqq\" (UniqueName: \"kubernetes.io/projected/c3b6a7b0-1c44-43ee-a789-36bb0cd51b87-kube-api-access-ffrqq\") pod \"ovnkube-control-plane-749d76644c-hvvbg\" (UID: \"c3b6a7b0-1c44-43ee-a789-36bb0cd51b87\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hvvbg" Sep 30 12:21:02 crc kubenswrapper[5002]: I0930 12:21:02.966349 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/c3b6a7b0-1c44-43ee-a789-36bb0cd51b87-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-hvvbg\" (UID: \"c3b6a7b0-1c44-43ee-a789-36bb0cd51b87\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hvvbg" Sep 30 12:21:02 crc kubenswrapper[5002]: I0930 12:21:02.966392 5002 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/c3b6a7b0-1c44-43ee-a789-36bb0cd51b87-env-overrides\") pod \"ovnkube-control-plane-749d76644c-hvvbg\" (UID: \"c3b6a7b0-1c44-43ee-a789-36bb0cd51b87\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hvvbg" Sep 30 12:21:02 crc kubenswrapper[5002]: I0930 12:21:02.966451 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/c3b6a7b0-1c44-43ee-a789-36bb0cd51b87-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-hvvbg\" (UID: \"c3b6a7b0-1c44-43ee-a789-36bb0cd51b87\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hvvbg" Sep 30 12:21:02 crc kubenswrapper[5002]: I0930 12:21:02.966912 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://28d9568103a5257388af7a54e676246ab6ee732abd4516bd06e2cd7471f0ec08\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:02Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:02 crc kubenswrapper[5002]: I0930 12:21:02.979876 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97f2c4d225c4e4f2f04337915618c52ced7280fe45dc735fd9b1aca03f95f7f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:02Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:02 crc kubenswrapper[5002]: I0930 12:21:02.994116 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:02Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:03 crc kubenswrapper[5002]: I0930 12:21:03.005028 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hvvbg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c3b6a7b0-1c44-43ee-a789-36bb0cd51b87\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy 
ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ffrqq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ffrqq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:21:02Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-hvvbg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:03Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:03 crc kubenswrapper[5002]: I0930 12:21:03.019858 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"90d5183f-90e1-421f-bd64-fd7f05605586\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d2e0f074c0f31a5432cea24faa4f95c276f59cdf48bca8ceecd065b4cdff5d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://902cc950dc462d86c9de62cac5d04e99aa3240952f43e8a5f07884e70ff84b56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://047c48f1e175cddc7dc981a8d9027783f4a90c21cb444050771de50fb02e7a9e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f45ac789dc45e907d68fe54ec7bc2ea72dbcc5f46cd55c113c014203d5e709d9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:03Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:03 crc kubenswrapper[5002]: I0930 12:21:03.029417 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-ls6n9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2ebe21b2-eb85-4b30-b911-78f6619d07f9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e83e79da4b7b8f89a61048c7583c9ff7ccb7ac910744dfbb572c189c5676741\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-frsmm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase
\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:52Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-ls6n9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:03Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:03 crc kubenswrapper[5002]: I0930 12:21:03.038975 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:03 crc kubenswrapper[5002]: I0930 12:21:03.039124 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:03 crc kubenswrapper[5002]: I0930 12:21:03.039214 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:03 crc kubenswrapper[5002]: I0930 12:21:03.039304 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:03 crc kubenswrapper[5002]: I0930 12:21:03.039379 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:03Z","lastTransitionTime":"2025-09-30T12:21:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:03 crc kubenswrapper[5002]: I0930 12:21:03.045119 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://30f55f9a0bcab13420179c62f63cd5e785afe964ac86286450fcd91d7ca0562e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://306dd7840789c5fe2565d819a744a57e330cb1fcc1ab1ca4b3c5ebcfd0361d3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:03Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:03 crc kubenswrapper[5002]: I0930 12:21:03.057378 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:03Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:03 crc kubenswrapper[5002]: I0930 12:21:03.067661 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ffrqq\" (UniqueName: \"kubernetes.io/projected/c3b6a7b0-1c44-43ee-a789-36bb0cd51b87-kube-api-access-ffrqq\") pod \"ovnkube-control-plane-749d76644c-hvvbg\" (UID: \"c3b6a7b0-1c44-43ee-a789-36bb0cd51b87\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hvvbg" Sep 30 12:21:03 crc kubenswrapper[5002]: I0930 12:21:03.067694 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/c3b6a7b0-1c44-43ee-a789-36bb0cd51b87-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-hvvbg\" (UID: \"c3b6a7b0-1c44-43ee-a789-36bb0cd51b87\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hvvbg" Sep 30 12:21:03 crc kubenswrapper[5002]: I0930 12:21:03.067718 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/c3b6a7b0-1c44-43ee-a789-36bb0cd51b87-env-overrides\") pod \"ovnkube-control-plane-749d76644c-hvvbg\" (UID: \"c3b6a7b0-1c44-43ee-a789-36bb0cd51b87\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hvvbg" Sep 30 12:21:03 crc kubenswrapper[5002]: I0930 12:21:03.067750 5002 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/c3b6a7b0-1c44-43ee-a789-36bb0cd51b87-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-hvvbg\" (UID: \"c3b6a7b0-1c44-43ee-a789-36bb0cd51b87\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hvvbg" Sep 30 12:21:03 crc kubenswrapper[5002]: I0930 12:21:03.068366 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/c3b6a7b0-1c44-43ee-a789-36bb0cd51b87-env-overrides\") pod \"ovnkube-control-plane-749d76644c-hvvbg\" (UID: \"c3b6a7b0-1c44-43ee-a789-36bb0cd51b87\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hvvbg" Sep 30 12:21:03 crc kubenswrapper[5002]: I0930 12:21:03.068726 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/c3b6a7b0-1c44-43ee-a789-36bb0cd51b87-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-hvvbg\" (UID: \"c3b6a7b0-1c44-43ee-a789-36bb0cd51b87\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hvvbg" Sep 30 12:21:03 crc kubenswrapper[5002]: I0930 12:21:03.070796 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-ttfn8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2bd9f18d-bfb3-4bd7-9a87-242029cd3200\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0809a587f4b73c4a5e632e8db2cc0d4a957eded6da9a771cf0b53e4862de54e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-
cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lvf2n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-ttfn8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:03Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:03 crc kubenswrapper[5002]: I0930 12:21:03.083180 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/c3b6a7b0-1c44-43ee-a789-36bb0cd51b87-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-hvvbg\" (UID: \"c3b6a7b0-1c44-43ee-a789-36bb0cd51b87\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hvvbg" Sep 30 12:21:03 crc kubenswrapper[5002]: I0930 12:21:03.085991 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-4pvsr_7095aa7a-d067-4977-bdc5-3a45a52a6a39/ovnkube-controller/1.log" Sep 30 12:21:03 crc kubenswrapper[5002]: I0930 12:21:03.086745 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-4pvsr_7095aa7a-d067-4977-bdc5-3a45a52a6a39/ovnkube-controller/0.log" Sep 30 12:21:03 crc kubenswrapper[5002]: I0930 12:21:03.087143 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ffrqq\" (UniqueName: \"kubernetes.io/projected/c3b6a7b0-1c44-43ee-a789-36bb0cd51b87-kube-api-access-ffrqq\") pod \"ovnkube-control-plane-749d76644c-hvvbg\" (UID: \"c3b6a7b0-1c44-43ee-a789-36bb0cd51b87\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hvvbg" Sep 30 12:21:03 crc kubenswrapper[5002]: I0930 12:21:03.089934 5002 generic.go:334] "Generic (PLEG): container finished" podID="7095aa7a-d067-4977-bdc5-3a45a52a6a39" containerID="8fc2e06caaa38408efd9783fa165bede103d30f2b1d556762d633a3ad3bd3436" exitCode=1 Sep 30 12:21:03 crc kubenswrapper[5002]: I0930 12:21:03.089974 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4pvsr" 
event={"ID":"7095aa7a-d067-4977-bdc5-3a45a52a6a39","Type":"ContainerDied","Data":"8fc2e06caaa38408efd9783fa165bede103d30f2b1d556762d633a3ad3bd3436"} Sep 30 12:21:03 crc kubenswrapper[5002]: I0930 12:21:03.090015 5002 scope.go:117] "RemoveContainer" containerID="95fc7cf1f23c91d4989756ce53322d40c23e54ad75a951dd4c49e23c0593a016" Sep 30 12:21:03 crc kubenswrapper[5002]: I0930 12:21:03.090760 5002 scope.go:117] "RemoveContainer" containerID="8fc2e06caaa38408efd9783fa165bede103d30f2b1d556762d633a3ad3bd3436" Sep 30 12:21:03 crc kubenswrapper[5002]: E0930 12:21:03.090926 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-4pvsr_openshift-ovn-kubernetes(7095aa7a-d067-4977-bdc5-3a45a52a6a39)\"" pod="openshift-ovn-kubernetes/ovnkube-node-4pvsr" podUID="7095aa7a-d067-4977-bdc5-3a45a52a6a39" Sep 30 12:21:03 crc kubenswrapper[5002]: I0930 12:21:03.096333 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"341a55c6-78d3-4fa2-8f47-b56fd41fa1c1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7bc40360d8d17f580c7ded1e33762aa443998e4202892e2f2effc08b6659a4a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbwzb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6868404a7219f8dbbc4e7541e5f80402c169808daea81b5f94546472cb8e70a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",
\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbwzb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:50Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-ncbb5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:03Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:03 crc kubenswrapper[5002]: I0930 12:21:03.115052 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4pvsr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7095aa7a-d067-4977-bdc5-3a45a52a6a39\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b95267743f525eb5c8712304a1dc7afe4403b2065c0a3b594d95dcba6348170\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://89dbc048c1483376e11a6754384e9ea154767a88f7dfb3463809a2554bb142b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d41b04f928f3b4423bf4aa1babfd6292b749752d31ac57771b36bed503312d3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ceb946c94ca247c05c57001754a73122b1bb98a98d886df9d64b2e4a003a4cf6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfbe7b62a4f0a00896ac4608d70c4ef6dc7675e0312f85ca181fcf1087fe73e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a65c3c1af3f1ee07c2f5350011d1f5eb61dfe0b21ff13d7326373d65ffe603c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8fc2e06caaa38408efd9783fa165bede103d30f2
b1d556762d633a3ad3bd3436\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://95fc7cf1f23c91d4989756ce53322d40c23e54ad75a951dd4c49e23c0593a016\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T12:21:00Z\\\",\\\"message\\\":\\\"alversions/factory.go:140\\\\nI0930 12:21:00.693904 6350 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0930 12:21:00.694464 6350 reflector.go:311] Stopping reflector *v1.NetworkAttachmentDefinition (0s) from github.com/k8snetworkplumbingwg/network-attachment-definition-client/pkg/client/informers/externalversions/factory.go:117\\\\nI0930 12:21:00.694998 6350 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0930 12:21:00.695035 6350 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0930 12:21:00.695058 6350 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0930 12:21:00.695066 6350 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0930 12:21:00.695088 6350 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0930 12:21:00.695112 6350 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0930 12:21:00.695123 6350 factory.go:656] Stopping watch factory\\\\nI0930 12:21:00.695132 6350 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0930 12:21:00.695134 6350 ovnkube.go:599] Stopped ovnkube\\\\nI0930 12:21:00.695144 6350 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0930 12:21:00.695154 6350 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0930 
1\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:57Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:21:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ce6227a7ac3ced1fbd4814039859e25ff52f314aa8479275f1bfc49b04cf411\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"cont
ainerID\\\":\\\"cri-o://29ad21e0858ff157cc785f60f949fb08082938850870cc2c5198447f1e1d9253\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://29ad21e0858ff157cc785f60f949fb08082938850870cc2c5198447f1e1d9253\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:50Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4pvsr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:03Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:03 crc kubenswrapper[5002]: I0930 12:21:03.126371 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:03Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:03 crc kubenswrapper[5002]: I0930 12:21:03.138518 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-p45pn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d0334cb8-cf38-4e50-80e8-2b81d8d46ae7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca035599120eed38b0abc1893076e4d1ecab6b1ce53605f53ff30fa2782b63de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2nvm4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:49Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-p45pn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-09-30T12:21:03Z is after 2025-08-24T17:21:41Z"
Sep 30 12:21:03 crc kubenswrapper[5002]: I0930 12:21:03.141851 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 12:21:03 crc kubenswrapper[5002]: I0930 12:21:03.141887 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 12:21:03 crc kubenswrapper[5002]: I0930 12:21:03.141898 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 12:21:03 crc kubenswrapper[5002]: I0930 12:21:03.141915 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 12:21:03 crc kubenswrapper[5002]: I0930 12:21:03.141926 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:03Z","lastTransitionTime":"2025-09-30T12:21:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 12:21:03 crc kubenswrapper[5002]: I0930 12:21:03.158910 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-2lqq2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7b7df259-4156-484c-bedd-543ca42f2970\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3e10fed923eacb0c5409835ec689ec74b4ef68b770774b3fe03e05db05d63c04\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6865c45580d6dc6acaf6dede9483213728b45f602a6df3a6d9264bbec520db66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f
8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6865c45580d6dc6acaf6dede9483213728b45f602a6df3a6d9264bbec520db66\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://042d2758098735539d73ee03bcaf5bfaaa765ef2337975341c63511bff33e4ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://042d2758098735539d73ee03bcaf5bfaaa765ef2337975341c63511bff33e4ec\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://037e6837e485e2662c487cea7b28bd28b85c3aa2dc698edd3d16b4269907dd83\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://037e6837e485e2662c487cea7b28bd28b85c3aa2dc698edd3d16b4269907dd83\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/e
tc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2a451f8cf7baf508ad6ceda7f19f3117804e5698e678bedf8e4187f60847aaac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2a451f8cf7baf508ad6ceda7f19f3117804e5698e678bedf8e4187f60847aaac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://edf6916be3680c65376e28fb893a869599eb07d870ac5337a38fed83b4dbf28d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://edf6916be3680c65376e28fb893a869599eb07d870ac5337a38fed83b4dbf28d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7019df241ab90e98201c474287f02919d99bca60c432a8f61ac68d29ce53bed8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7019df241ab90e98201c474287f02919d99bca60c432a8f61ac68d29ce53bed8\\\",\\\"exitC
ode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-2lqq2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:03Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:03 crc kubenswrapper[5002]: I0930 12:21:03.213123 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:03Z is after 2025-08-24T17:21:41Z"
Sep 30 12:21:03 crc kubenswrapper[5002]: I0930 12:21:03.216872 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hvvbg"
Sep 30 12:21:03 crc kubenswrapper[5002]: W0930 12:21:03.233663 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc3b6a7b0_1c44_43ee_a789_36bb0cd51b87.slice/crio-eda786bbe641a0ee598b7bed1d55262e8c9dd9c98c157c761db4e126f83d2a17 WatchSource:0}: Error finding container eda786bbe641a0ee598b7bed1d55262e8c9dd9c98c157c761db4e126f83d2a17: Status 404 returned error can't find the container with id eda786bbe641a0ee598b7bed1d55262e8c9dd9c98c157c761db4e126f83d2a17
Sep 30 12:21:03 crc kubenswrapper[5002]: I0930 12:21:03.240794 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-p45pn" err="failed to patch status
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d0334cb8-cf38-4e50-80e8-2b81d8d46ae7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca035599120eed38b0abc1893076e4d1ecab6b1ce53605f53ff30fa2782b63de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2nvm4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:49Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-p45pn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:03Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:03 crc kubenswrapper[5002]: I0930 12:21:03.243381 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:03 crc kubenswrapper[5002]: I0930 12:21:03.243416 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:03 crc kubenswrapper[5002]: I0930 12:21:03.243428 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:03 crc kubenswrapper[5002]: I0930 12:21:03.243443 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:03 crc kubenswrapper[5002]: I0930 12:21:03.243453 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:03Z","lastTransitionTime":"2025-09-30T12:21:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:03 crc kubenswrapper[5002]: I0930 12:21:03.256923 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-2lqq2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7b7df259-4156-484c-bedd-543ca42f2970\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3e10fed923eacb0c5409835ec689ec74b4ef68b770774b3fe03e05db05d63c04\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6865c45580d6dc6acaf6dede9483213728b45f602a6df3a6d9264bbec520db66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6865c45580d6dc6acaf6dede9483213728b45f602a6df3a6d9264bbec520db66\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://042d2758
098735539d73ee03bcaf5bfaaa765ef2337975341c63511bff33e4ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://042d2758098735539d73ee03bcaf5bfaaa765ef2337975341c63511bff33e4ec\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://037e6837e485e2662c487cea7b28bd28b85c3aa2dc698edd3d16b4269907dd83\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://037e6837e485e2662c487cea7b28bd28b85c3aa2dc698edd3d16b4269907dd83\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2a451f8cf7baf508ad6ceda7f19f3117804e5698e678bedf8e4187f60847aaac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2a451f8cf7baf508ad6ceda7f19f3117804e5698e678bedf8e4187f60847aaac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/e
ntrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://edf6916be3680c65376e28fb893a869599eb07d870ac5337a38fed83b4dbf28d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://edf6916be3680c65376e28fb893a869599eb07d870ac5337a38fed83b4dbf28d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7019df241ab90e98201c474287f02919d99bca60c432a8f61ac68d29ce53bed8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7019df241ab90e98201c474287f02919d99bca60c432a8f61ac68d29ce53bed8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-2lqq2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:03Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:03 crc kubenswrapper[5002]: I0930 12:21:03.270695 5002 
status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1fe2fccc-790d-41b7-a322-0f99dbd9b3e2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a46e01897629261ad42fc3fa2cc7fdf56a2a020864a1088d9e58edf61aac296e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://017ebbf00a59d33f36e3515dd280bc0c4363da8b3e621339185ebdb3c8728148\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ee0dc371a43f3780e1178e29ac491c7a61786fccb37527dd2f21d800adbbdae\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:29
Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://49c139b2c0ae558d108d47a8452c2a8348fc164761d468e1b9b98ff2c7407f20\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://49c139b2c0ae558d108d47a8452c2a8348fc164761d468e1b9b98ff2c7407f20\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"message\\\":\\\"le observer\\\\nW0930 12:20:46.369838 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0930 12:20:46.370037 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 12:20:46.371113 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1995022772/tls.crt::/tmp/serving-cert-1995022772/tls.key\\\\\\\"\\\\nI0930 12:20:46.549991 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0930 12:20:46.558658 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0930 12:20:46.558692 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0930 12:20:46.558723 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0930 12:20:46.558735 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0930 12:20:46.572632 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0930 12:20:46.572689 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 12:20:46.572707 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 12:20:46.572718 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0930 12:20:46.572725 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0930 12:20:46.572737 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0930 12:20:46.572744 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0930 12:20:46.573045 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0930 12:20:46.574462 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:40Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b99e73bb34b117cdb12e2475fc8e7d0ffdcb7fc44a000282e7776473eca209ea\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://24b1bc36a80f3925846e76f49c3347f1e2f14555096b3a514b06c6eb2e8fe02c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://24b1bc36a80f3925846e76f49c3347f1e2f14555096b3a514b06c6eb2e8fe02c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:03Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:03 crc kubenswrapper[5002]: I0930 12:21:03.284017 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://28d9568103a5257388af7a54e676246ab6ee732abd4516bd06e2cd7471f0ec08\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:03Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:03 crc kubenswrapper[5002]: I0930 12:21:03.295571 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97f2c4d225c4e4f2f04337915618c52ced7280fe45dc735fd9b1aca03f95f7f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:03Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:03 crc kubenswrapper[5002]: I0930 12:21:03.308493 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:03Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:03 crc kubenswrapper[5002]: I0930 12:21:03.320818 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hvvbg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c3b6a7b0-1c44-43ee-a789-36bb0cd51b87\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy 
ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ffrqq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ffrqq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:21:02Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-hvvbg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:03Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:03 crc kubenswrapper[5002]: I0930 12:21:03.337635 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1d4eefd-8292-45a1-af4b-4a4906cccd2f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21a70de7ac636795f52b3bdd2a2b938494cabcf80e1fb9b321a309f4719edf4e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://240fb1d0e11ae7b3459121ba32094ddfa79dc71a1180befebcaf594859a63d86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://91de7b5c3b52d03b05c151b66857b5af5f3b9d228f3f1af32a7f504225e1e739\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4cb1aca4e313653c2d83c0d4fb8dfab1e6f8c82
20c806cbcdcf154f2d9ab6e29\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ef59c1ec617a61effd91da12779f13866a50fe611d078330f0275e8d6ef1c27e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3a814062d9554255b768b26ad452946c64836707ac0e68c9e3f107dc587c0ccb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3a814062d9554255b768b26ad452946c64836707ac0e68c9e3f107dc587c0ccb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://74db2c969d2f9049fd8e8130a9fd50b312f3fce049ab0e814390dbff3b0cb596\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://74db2c969d2f9049fd8e8130a9fd50b312f3fce049ab0e814390dbff3b0cb596\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://a367821c9b4b8799fb789ca39d487c519925da11ebb80e94a0123e02cf78964e\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a367821c9b4b8799fb789ca39d487c519925da11ebb80e94a0123e02cf78964e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:26Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:03Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:03 crc kubenswrapper[5002]: I0930 12:21:03.346173 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:03 crc kubenswrapper[5002]: I0930 12:21:03.346201 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:03 crc kubenswrapper[5002]: I0930 12:21:03.346210 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:03 crc kubenswrapper[5002]: I0930 12:21:03.346223 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:03 crc kubenswrapper[5002]: I0930 12:21:03.346231 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:03Z","lastTransitionTime":"2025-09-30T12:21:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 12:21:03 crc kubenswrapper[5002]: I0930 12:21:03.347169 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-ls6n9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2ebe21b2-eb85-4b30-b911-78f6619d07f9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e83e79da4b7b8f89a61048c7583c9ff7ccb7ac910744dfbb572c189c5676741\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-frsmm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:52Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-ls6n9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:03Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:03 crc kubenswrapper[5002]: I0930 12:21:03.358345 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"90d5183f-90e1-421f-bd64-fd7f05605586\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d2e0f074c0f31a5432cea24faa4f95c276f59cdf48bca8ceecd065b4cdff5d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://902cc950dc462d86c9de62cac5d04e99aa3240952f43e8a5f07884e70ff84b56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://047c48f1e175cddc7dc981a8d9027783f4a90c21cb444050771de50fb02e7a9e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f45ac789dc45e907d68fe54ec7bc2ea72dbcc5f46cd55c113c014203d5e709d9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:03Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:03 crc kubenswrapper[5002]: I0930 12:21:03.369155 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:03Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:03 crc kubenswrapper[5002]: I0930 12:21:03.381252 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-ttfn8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2bd9f18d-bfb3-4bd7-9a87-242029cd3200\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0809a587f4b73c4a5e632e8db2cc0d4a957eded6da9a771cf0b53e4862de54e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mo
untPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lvf2n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-ttfn8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:03Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:03 crc kubenswrapper[5002]: I0930 12:21:03.393299 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"341a55c6-78d3-4fa2-8f47-b56fd41fa1c1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7bc40360d8d17f580c7ded1e33762aa443998e4202892e2f2effc08b6659a4a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbwzb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-
o://6868404a7219f8dbbc4e7541e5f80402c169808daea81b5f94546472cb8e70a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbwzb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:50Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-ncbb5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:03Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:03 crc kubenswrapper[5002]: I0930 12:21:03.411809 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4pvsr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7095aa7a-d067-4977-bdc5-3a45a52a6a39\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b95267743f525eb5c8712304a1dc7afe4403b2065c0a3b594d95dcba6348170\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://89dbc048c1483376e11a6754384e9ea154767a88f7dfb3463809a2554bb142b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d41b04f928f3b4423bf4aa1babfd6292b749752d31ac57771b36bed503312d3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ceb946c94ca247c05c57001754a73122b1bb98a98d886df9d64b2e4a003a4cf6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfbe7b62a4f0a00896ac4608d70c4ef6dc7675e0312f85ca181fcf1087fe73e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a65c3c1af3f1ee07c2f5350011d1f5eb61dfe0b21ff13d7326373d65ffe603c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8fc2e06caaa38408efd9783fa165bede103d30f2
b1d556762d633a3ad3bd3436\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://95fc7cf1f23c91d4989756ce53322d40c23e54ad75a951dd4c49e23c0593a016\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T12:21:00Z\\\",\\\"message\\\":\\\"alversions/factory.go:140\\\\nI0930 12:21:00.693904 6350 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0930 12:21:00.694464 6350 reflector.go:311] Stopping reflector *v1.NetworkAttachmentDefinition (0s) from github.com/k8snetworkplumbingwg/network-attachment-definition-client/pkg/client/informers/externalversions/factory.go:117\\\\nI0930 12:21:00.694998 6350 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0930 12:21:00.695035 6350 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0930 12:21:00.695058 6350 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0930 12:21:00.695066 6350 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0930 12:21:00.695088 6350 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0930 12:21:00.695112 6350 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0930 12:21:00.695123 6350 factory.go:656] Stopping watch factory\\\\nI0930 12:21:00.695132 6350 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0930 12:21:00.695134 6350 ovnkube.go:599] Stopped ovnkube\\\\nI0930 12:21:00.695144 6350 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0930 12:21:00.695154 6350 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0930 1\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:57Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8fc2e06caaa38408efd9783fa165bede103d30f2b1d556762d633a3ad3bd3436\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T12:21:02Z\\\",\\\"message\\\":\\\" stale LBs for map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-multus/multus-admission-controller\\\\\\\"}\\\\nI0930 12:21:01.858370 6485 services_controller.go:360] Finished syncing service multus-admission-controller on namespace openshift-multus for network=default : 12.396713ms\\\\nI0930 12:21:01.858856 6485 obj_retry.go:551] Creating *factory.egressNode crc took: 15.122418ms\\\\nI0930 12:21:01.858901 6485 factory.go:1336] Added *v1.Node event handler 7\\\\nI0930 12:21:01.858970 6485 factory.go:1336] Added *v1.EgressIP event handler 8\\\\nI0930 12:21:01.858989 6485 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0930 12:21:01.859004 6485 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0930 12:21:01.859053 6485 handler.go:208] Removed *v1.Node event handler 2\\\\nI0930 12:21:01.859065 6485 factory.go:656] Stopping watch factory\\\\nI0930 12:21:01.859090 6485 handler.go:208] Removed *v1.Node event handler 7\\\\nI0930 12:21:01.859506 6485 factory.go:1336] Added *v1.EgressFirewall event handler 9\\\\nI0930 12:21:01.859639 6485 controller.go:132] Adding controller ef_node_controller event 
handlers\\\\nI0930 12:21:01.859673 6485 ovnkube.go:599] Stopped ovnkube\\\\nI0930 12:21:01.859702 6485 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF0930 12:21:01.859768 6485 ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T12:21:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ce6227a7ac3ced1fbd4814039859e25ff52f314aa8479275f1bfc49b04cf411\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}
],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29ad21e0858ff157cc785f60f949fb08082938850870cc2c5198447f1e1d9253\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://29ad21e0858ff157cc785f60f949fb08082938850870cc2c5198447f1e1d9253\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:50Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4pvsr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:03Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:03 crc kubenswrapper[5002]: I0930 12:21:03.424483 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://30f55f9a0bcab13420179c62f63cd5e785afe964ac86286450fcd91d7ca0562e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://306dd7840789c5fe2565d819a744a57e330cb1fcc1ab1ca4b3c5ebcfd0361d3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:03Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:03 crc kubenswrapper[5002]: I0930 12:21:03.448768 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:03 crc kubenswrapper[5002]: I0930 12:21:03.448805 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:03 crc kubenswrapper[5002]: I0930 12:21:03.448816 5002 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Sep 30 12:21:03 crc kubenswrapper[5002]: I0930 12:21:03.448832 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:03 crc kubenswrapper[5002]: I0930 12:21:03.448846 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:03Z","lastTransitionTime":"2025-09-30T12:21:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:03 crc kubenswrapper[5002]: I0930 12:21:03.551641 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:03 crc kubenswrapper[5002]: I0930 12:21:03.551670 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:03 crc kubenswrapper[5002]: I0930 12:21:03.551678 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:03 crc kubenswrapper[5002]: I0930 12:21:03.551689 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:03 crc kubenswrapper[5002]: I0930 12:21:03.551699 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:03Z","lastTransitionTime":"2025-09-30T12:21:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:03 crc kubenswrapper[5002]: I0930 12:21:03.653945 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:03 crc kubenswrapper[5002]: I0930 12:21:03.653987 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:03 crc kubenswrapper[5002]: I0930 12:21:03.653995 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:03 crc kubenswrapper[5002]: I0930 12:21:03.654008 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:03 crc kubenswrapper[5002]: I0930 12:21:03.654017 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:03Z","lastTransitionTime":"2025-09-30T12:21:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 12:21:03 crc kubenswrapper[5002]: I0930 12:21:03.757260 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:03 crc kubenswrapper[5002]: I0930 12:21:03.757327 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:03 crc kubenswrapper[5002]: I0930 12:21:03.757349 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:03 crc kubenswrapper[5002]: I0930 12:21:03.757380 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:03 crc kubenswrapper[5002]: I0930 12:21:03.757401 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:03Z","lastTransitionTime":"2025-09-30T12:21:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:03 crc kubenswrapper[5002]: I0930 12:21:03.860582 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:03 crc kubenswrapper[5002]: I0930 12:21:03.860623 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:03 crc kubenswrapper[5002]: I0930 12:21:03.860641 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:03 crc kubenswrapper[5002]: I0930 12:21:03.860666 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:03 crc kubenswrapper[5002]: I0930 12:21:03.860682 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:03Z","lastTransitionTime":"2025-09-30T12:21:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:03 crc kubenswrapper[5002]: I0930 12:21:03.963648 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:03 crc kubenswrapper[5002]: I0930 12:21:03.963933 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:03 crc kubenswrapper[5002]: I0930 12:21:03.963948 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:03 crc kubenswrapper[5002]: I0930 12:21:03.963965 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:03 crc kubenswrapper[5002]: I0930 12:21:03.963975 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:03Z","lastTransitionTime":"2025-09-30T12:21:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 12:21:04 crc kubenswrapper[5002]: I0930 12:21:04.067649 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:04 crc kubenswrapper[5002]: I0930 12:21:04.067721 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:04 crc kubenswrapper[5002]: I0930 12:21:04.067755 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:04 crc kubenswrapper[5002]: I0930 12:21:04.067782 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:04 crc kubenswrapper[5002]: I0930 12:21:04.067803 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:04Z","lastTransitionTime":"2025-09-30T12:21:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:04 crc kubenswrapper[5002]: I0930 12:21:04.098604 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hvvbg" event={"ID":"c3b6a7b0-1c44-43ee-a789-36bb0cd51b87","Type":"ContainerStarted","Data":"4537e7bf982dba689bd81821609884ac5f2530e6efa7d48cd65e05ff64d2ec6b"} Sep 30 12:21:04 crc kubenswrapper[5002]: I0930 12:21:04.098674 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hvvbg" event={"ID":"c3b6a7b0-1c44-43ee-a789-36bb0cd51b87","Type":"ContainerStarted","Data":"cb569d449d2d8d49ee8d62c4815b625481cd5ddd882a135dac7465fe63fbc67e"} Sep 30 12:21:04 crc kubenswrapper[5002]: I0930 12:21:04.098691 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hvvbg" event={"ID":"c3b6a7b0-1c44-43ee-a789-36bb0cd51b87","Type":"ContainerStarted","Data":"eda786bbe641a0ee598b7bed1d55262e8c9dd9c98c157c761db4e126f83d2a17"} Sep 30 12:21:04 crc kubenswrapper[5002]: I0930 12:21:04.102086 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-4pvsr_7095aa7a-d067-4977-bdc5-3a45a52a6a39/ovnkube-controller/1.log" Sep 30 12:21:04 crc kubenswrapper[5002]: I0930 12:21:04.107653 5002 scope.go:117] "RemoveContainer" containerID="8fc2e06caaa38408efd9783fa165bede103d30f2b1d556762d633a3ad3bd3436" Sep 30 12:21:04 crc kubenswrapper[5002]: E0930 12:21:04.107894 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-4pvsr_openshift-ovn-kubernetes(7095aa7a-d067-4977-bdc5-3a45a52a6a39)\"" pod="openshift-ovn-kubernetes/ovnkube-node-4pvsr" podUID="7095aa7a-d067-4977-bdc5-3a45a52a6a39" Sep 30 12:21:04 crc kubenswrapper[5002]: I0930 12:21:04.120664 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:04Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:04 crc kubenswrapper[5002]: I0930 12:21:04.139426 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hvvbg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c3b6a7b0-1c44-43ee-a789-36bb0cd51b87\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cb569d449d2d8d49ee8d62c4815b625481cd5ddd882a135dac7465fe63fbc67e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:21:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ffrqq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4537e7bf982dba689bd81821609884ac5f2530e6efa7d48cd65e05ff64d2ec6b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:21:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ffrqq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:21:02Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-hvvbg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:04Z is after 2025-08-24T17:21:41Z" Sep 30 
12:21:04 crc kubenswrapper[5002]: I0930 12:21:04.168572 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1d4eefd-8292-45a1-af4b-4a4906cccd2f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21a70de7ac636795f52b3bdd2a2b938494cabcf80e1fb9b321a309f4719edf4e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://240fb1d0e11ae7b3459121ba32094ddfa79dc71a1180befebcaf594859a63d86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://91de7b5c3b52d03b05c151b66857b5af5f3b9d228f3f1af32a7f504225e1e739\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"lo
g-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4cb1aca4e313653c2d83c0d4fb8dfab1e6f8c8220c806cbcdcf154f2d9ab6e29\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ef59c1ec617a61effd91da12779f13866a50fe611d078330f0275e8d6ef1c27e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3a814062d9554255b768b26ad452946c64836707ac0e68c9e3f107dc587c0ccb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3a814062d9554255b768b26ad452946c64836707ac0e68c9e3f107dc587c0ccb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://74db2c969d2f9049fd8e8130a9fd50b312f3fce049ab0e814390dbff3b0cb596\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://74db2c969d2f9049fd8e8130a9fd50b312f3fce049ab0e814390dbff3b0cb596\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"reas
on\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://a367821c9b4b8799fb789ca39d487c519925da11ebb80e94a0123e02cf78964e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a367821c9b4b8799fb789ca39d487c519925da11ebb80e94a0123e02cf78964e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:26Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:04Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:04 crc kubenswrapper[5002]: I0930 12:21:04.170380 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:04 crc kubenswrapper[5002]: I0930 12:21:04.170445 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:04 crc kubenswrapper[5002]: I0930 12:21:04.170468 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:04 crc kubenswrapper[5002]: I0930 12:21:04.170536 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:04 crc kubenswrapper[5002]: I0930 12:21:04.170567 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:04Z","lastTransitionTime":"2025-09-30T12:21:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 12:21:04 crc kubenswrapper[5002]: I0930 12:21:04.187742 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1fe2fccc-790d-41b7-a322-0f99dbd9b3e2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a46e01897629261ad42fc3fa2cc7fdf56a2a020864a1088d9e58edf61aac296e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://017ebbf00a59d33f36e3515dd280bc0c4363da8b3e621339185ebdb3c8728148\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ee0dc371a43f3780e1178e29ac491c7a61786fccb37527dd2f21d800adbbdae\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartC
ount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://49c139b2c0ae558d108d47a8452c2a8348fc164761d468e1b9b98ff2c7407f20\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://49c139b2c0ae558d108d47a8452c2a8348fc164761d468e1b9b98ff2c7407f20\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"message\\\":\\\"le observer\\\\nW0930 12:20:46.369838 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0930 12:20:46.370037 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 12:20:46.371113 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1995022772/tls.crt::/tmp/serving-cert-1995022772/tls.key\\\\\\\"\\\\nI0930 12:20:46.549991 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0930 12:20:46.558658 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0930 12:20:46.558692 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0930 12:20:46.558723 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0930 12:20:46.558735 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0930 12:20:46.572632 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0930 12:20:46.572689 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 12:20:46.572707 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 12:20:46.572718 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0930 12:20:46.572725 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0930 12:20:46.572737 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0930 12:20:46.572744 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0930 12:20:46.573045 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0930 12:20:46.574462 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:40Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b99e73bb34b117cdb12e2475fc8e7d0ffdcb7fc44a000282e7776473eca209ea\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://24b1bc36a80f3925846e76f49c3347f1e2f14555096b3a514b06c6eb2e8fe02c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://24b1bc36a80f3925846e76f49c3347f1e2f14555096b3a514b06c6eb2e8fe02c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:04Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:04 crc kubenswrapper[5002]: I0930 12:21:04.209220 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://28d9568103a5257388af7a54e676246ab6ee732abd4516bd06e2cd7471f0ec08\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:04Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:04 crc kubenswrapper[5002]: I0930 12:21:04.226017 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97f2c4d225c4e4f2f04337915618c52ced7280fe45dc735fd9b1aca03f95f7f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:04Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:04 crc kubenswrapper[5002]: I0930 12:21:04.241440 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"90d5183f-90e1-421f-bd64-fd7f05605586\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d2e0f074c0f31a5432cea24faa4f95c276f59cdf48bca8ceecd065b4cdff5d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://902cc950dc462d86c9de62cac5d04e99aa3240952f43e8a5f07884e70ff84b56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://047c48f1e175cddc7dc981a8d9027783f4a90c21cb444050771de50fb02e7a9e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f45ac789dc45e907d68fe54ec7bc2ea72dbcc5f46cd55c113c014203d5e709d9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:04Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:04 crc kubenswrapper[5002]: I0930 12:21:04.257739 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-ls6n9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2ebe21b2-eb85-4b30-b911-78f6619d07f9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e83e79da4b7b8f89a61048c7583c9ff7ccb7ac910744dfbb572c189c5676741\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-frsmm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase
\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:52Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-ls6n9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:04Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:04 crc kubenswrapper[5002]: I0930 12:21:04.273025 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:04 crc kubenswrapper[5002]: I0930 12:21:04.273131 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:04 crc kubenswrapper[5002]: I0930 12:21:04.273155 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:04 crc kubenswrapper[5002]: I0930 12:21:04.273183 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:04 crc kubenswrapper[5002]: I0930 12:21:04.273205 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:04Z","lastTransitionTime":"2025-09-30T12:21:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:04 crc kubenswrapper[5002]: I0930 12:21:04.275419 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4pvsr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7095aa7a-d067-4977-bdc5-3a45a52a6a39\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b95267743f525eb5c8712304a1dc7afe4403b2065c0a3b594d95dcba6348170\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://89dbc048c1483376e11a6754384e9ea154767a88f7dfb3463809a2554bb142b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d41b04f928f3b4423bf4aa1babfd6292b749752d31ac57771b36bed503312d3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ceb946c94ca247c05c57001754a73122b1bb98a98d886df9d64b2e4a003a4cf6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfbe7b62a4f0a00896ac4608d70c4ef6dc7675e0312f85ca181fcf1087fe73e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a65c3c1af3f1ee07c2f5350011d1f5eb61dfe0b21ff13d7326373d65ffe603c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8fc2e06caaa38408efd9783fa165bede103d30f2
b1d556762d633a3ad3bd3436\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://95fc7cf1f23c91d4989756ce53322d40c23e54ad75a951dd4c49e23c0593a016\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T12:21:00Z\\\",\\\"message\\\":\\\"alversions/factory.go:140\\\\nI0930 12:21:00.693904 6350 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0930 12:21:00.694464 6350 reflector.go:311] Stopping reflector *v1.NetworkAttachmentDefinition (0s) from github.com/k8snetworkplumbingwg/network-attachment-definition-client/pkg/client/informers/externalversions/factory.go:117\\\\nI0930 12:21:00.694998 6350 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0930 12:21:00.695035 6350 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0930 12:21:00.695058 6350 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0930 12:21:00.695066 6350 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0930 12:21:00.695088 6350 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0930 12:21:00.695112 6350 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0930 12:21:00.695123 6350 factory.go:656] Stopping watch factory\\\\nI0930 12:21:00.695132 6350 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0930 12:21:00.695134 6350 ovnkube.go:599] Stopped ovnkube\\\\nI0930 12:21:00.695144 6350 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0930 12:21:00.695154 6350 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0930 1\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:57Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8fc2e06caaa38408efd9783fa165bede103d30f2b1d556762d633a3ad3bd3436\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T12:21:02Z\\\",\\\"message\\\":\\\" stale LBs for map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-multus/multus-admission-controller\\\\\\\"}\\\\nI0930 12:21:01.858370 6485 services_controller.go:360] Finished syncing service multus-admission-controller on namespace openshift-multus for network=default : 12.396713ms\\\\nI0930 12:21:01.858856 6485 obj_retry.go:551] Creating *factory.egressNode crc took: 15.122418ms\\\\nI0930 12:21:01.858901 6485 factory.go:1336] Added *v1.Node event handler 7\\\\nI0930 12:21:01.858970 6485 factory.go:1336] Added *v1.EgressIP event handler 8\\\\nI0930 12:21:01.858989 6485 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0930 12:21:01.859004 6485 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0930 12:21:01.859053 6485 handler.go:208] Removed *v1.Node event handler 2\\\\nI0930 12:21:01.859065 6485 factory.go:656] Stopping watch factory\\\\nI0930 12:21:01.859090 6485 handler.go:208] Removed *v1.Node event handler 7\\\\nI0930 12:21:01.859506 6485 factory.go:1336] Added *v1.EgressFirewall event handler 9\\\\nI0930 12:21:01.859639 6485 controller.go:132] Adding controller ef_node_controller event 
handlers\\\\nI0930 12:21:01.859673 6485 ovnkube.go:599] Stopped ovnkube\\\\nI0930 12:21:01.859702 6485 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF0930 12:21:01.859768 6485 ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T12:21:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ce6227a7ac3ced1fbd4814039859e25ff52f314aa8479275f1bfc49b04cf411\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}
],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29ad21e0858ff157cc785f60f949fb08082938850870cc2c5198447f1e1d9253\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://29ad21e0858ff157cc785f60f949fb08082938850870cc2c5198447f1e1d9253\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:50Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4pvsr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:04Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:04 crc kubenswrapper[5002]: I0930 12:21:04.290819 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://30f55f9a0bcab13420179c62f63cd5e785afe964ac86286450fcd91d7ca0562e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://306dd7840789c5fe2565d819a744a57e330cb1fcc1ab1ca4b3c5ebcfd0361d3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:04Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:04 crc kubenswrapper[5002]: I0930 12:21:04.303867 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:04Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:04 crc kubenswrapper[5002]: I0930 12:21:04.323583 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-ttfn8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2bd9f18d-bfb3-4bd7-9a87-242029cd3200\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0809a587f4b73c4a5e632e8db2cc0d4a957eded6da9a771cf0b53e4862de54e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lvf2n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-ttfn8\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:04Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:04 crc kubenswrapper[5002]: I0930 12:21:04.335669 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"341a55c6-78d3-4fa2-8f47-b56fd41fa1c1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7bc40360d8d17f580c7ded1e33762aa443998e4202892e2f2effc08b6659a4a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbwzb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6868404a7219f8dbbc4e7541e5f80402c169808daea81b5f94546472cb8e70a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbwzb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:50Z\\\"}}\" for pod 
\"openshift-machine-config-operator\"/\"machine-config-daemon-ncbb5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:04Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:04 crc kubenswrapper[5002]: I0930 12:21:04.351793 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-2lqq2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7b7df259-4156-484c-bedd-543ca42f2970\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3e10fed923eacb0c5409835ec689ec74b4ef68b770774b3fe03e05db05d63c04\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6865c45580d6dc6acaf6dede9483213728b45f602a6df3a6d9264bbec520db66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6865c45580d6dc6acaf6dede9483213728b45f602a6df3a6d9264bbec520db66\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\
",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://042d2758098735539d73ee03bcaf5bfaaa765ef2337975341c63511bff33e4ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://042d2758098735539d73ee03bcaf5bfaaa765ef2337975341c63511bff33e4ec\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://037e6837e485e2662c487cea7b28bd28b85c3aa2dc698edd3d16b4269907dd83\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://037e6837e485e2662c487cea7b28bd28b85c3aa2dc698edd3d16b4269907dd83\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2a451f8cf7baf508ad6ceda7f19f3117804e5698e678bedf8e4187f60847aaac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"
containerID\\\":\\\"cri-o://2a451f8cf7baf508ad6ceda7f19f3117804e5698e678bedf8e4187f60847aaac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://edf6916be3680c65376e28fb893a869599eb07d870ac5337a38fed83b4dbf28d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://edf6916be3680c65376e28fb893a869599eb07d870ac5337a38fed83b4dbf28d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7019df241ab90e98201c474287f02919d99bca60c432a8f61ac68d29ce53bed8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7019df241ab90e98201c474287f02919d99bca60c432a8f61ac68d29ce53bed8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-2lqq2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to 
call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:04Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:04 crc kubenswrapper[5002]: I0930 12:21:04.363300 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:04Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:04 crc kubenswrapper[5002]: I0930 12:21:04.373072 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-p45pn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d0334cb8-cf38-4e50-80e8-2b81d8d46ae7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca035599120eed38b0abc1893076e4d1ecab6b1ce53605f53ff30fa2782b63de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2nvm4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:49Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-p45pn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:04Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:04 crc kubenswrapper[5002]: I0930 12:21:04.375671 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:04 crc kubenswrapper[5002]: I0930 12:21:04.375702 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:04 crc kubenswrapper[5002]: I0930 12:21:04.375714 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:04 crc kubenswrapper[5002]: I0930 12:21:04.375727 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:04 crc kubenswrapper[5002]: I0930 12:21:04.375736 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:04Z","lastTransitionTime":"2025-09-30T12:21:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:04 crc kubenswrapper[5002]: I0930 12:21:04.384934 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:04Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:04 crc kubenswrapper[5002]: I0930 12:21:04.396374 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-p45pn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d0334cb8-cf38-4e50-80e8-2b81d8d46ae7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca035599120eed38b0abc1893076e4d1ecab6b1ce53605f53ff30fa2782b63de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2nvm4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:49Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-p45pn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:04Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:04 crc kubenswrapper[5002]: I0930 12:21:04.410553 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/network-metrics-daemon-dj2ln"] Sep 30 12:21:04 crc kubenswrapper[5002]: I0930 12:21:04.410705 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-2lqq2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7b7df259-4156-484c-bedd-543ca42f2970\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3e10fed923eacb0c5409835ec689ec74b4ef68b770774b3fe03e05db05d63c04\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6865c45580d6dc6acaf6dede9483213728b45f602a6df3a6d9264bbec520db66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6865c45580d6dc6acaf6dede9483213728b45f602a6df3a6d9264bbec520db66\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://042d2758098735539d73ee03bcaf5bfaaa765ef2337975341c63511bff33e4ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://042d2758098735539d73ee03bcaf5bfaaa765ef2337975341c63511bff33e4ec\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://037e6837e485e2662c487cea7b28bd28b85c3aa2dc698edd3d16b4269907dd83\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://037e6837e485e2662c487cea7b28bd28b85c3aa2dc698edd3d16b4269907dd83\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2a451f8cf7baf508ad6ceda7f19f3117804e5698e678bedf8e4187f60847aaac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2a451f8cf7baf508ad6ceda7f19f3117804e5698e678bedf8e4187f60847aaac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://edf6916be3680c65376e28fb893a869599eb07d870ac5337a38fed83b4dbf28d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://edf6916be3680c65376e28fb893a869599eb07d870ac5337a38fed83b4dbf28d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7019df241ab90e98201c474287f02919d99bca60c432a8f61ac68d29ce53bed8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7019df241ab90e98201c474287f02919d99bca60c432a8f61ac68d29ce53bed8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-2lqq2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:04Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:04 crc kubenswrapper[5002]: I0930 12:21:04.410998 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-dj2ln" Sep 30 12:21:04 crc kubenswrapper[5002]: E0930 12:21:04.411041 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-dj2ln" podUID="68756e8a-d882-403f-acd7-2c41fce4446f" Sep 30 12:21:04 crc kubenswrapper[5002]: I0930 12:21:04.425839 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1fe2fccc-790d-41b7-a322-0f99dbd9b3e2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a46e01897629261ad42fc3fa2cc7fdf56a2a020864a1088d9e58edf61aac296e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://017ebbf00a59d33f36e3515dd280bc0c4363da8b3e621339185ebdb3c8728148\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"container
ID\\\":\\\"cri-o://3ee0dc371a43f3780e1178e29ac491c7a61786fccb37527dd2f21d800adbbdae\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://49c139b2c0ae558d108d47a8452c2a8348fc164761d468e1b9b98ff2c7407f20\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://49c139b2c0ae558d108d47a8452c2a8348fc164761d468e1b9b98ff2c7407f20\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"message\\\":\\\"le observer\\\\nW0930 12:20:46.369838 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0930 12:20:46.370037 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 12:20:46.371113 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1995022772/tls.crt::/tmp/serving-cert-1995022772/tls.key\\\\\\\"\\\\nI0930 12:20:46.549991 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0930 12:20:46.558658 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0930 12:20:46.558692 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0930 12:20:46.558723 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0930 12:20:46.558735 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0930 12:20:46.572632 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0930 12:20:46.572689 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 12:20:46.572707 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 12:20:46.572718 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0930 12:20:46.572725 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0930 12:20:46.572737 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0930 12:20:46.572744 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0930 12:20:46.573045 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0930 12:20:46.574462 1 cmd.go:182] pods 
\\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:40Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b99e73bb34b117cdb12e2475fc8e7d0ffdcb7fc44a000282e7776473eca209ea\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://24b1bc36a80f3925846e76f49c3347f1e2f14555096b3a514b06c6eb2e8fe02c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://24b1bc36a80f3925846e76f49c3347f1e2f14555096b3a514b06c6eb2e8fe02c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:04Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:04 crc kubenswrapper[5002]: I0930 12:21:04.439558 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://28d9568103a5257388af7a54e676246ab6ee732abd4516bd06e2cd7471f0ec08\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:04Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:04 crc kubenswrapper[5002]: I0930 12:21:04.451498 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97f2c4d225c4e4f2f04337915618c52ced7280fe45dc735fd9b1aca03f95f7f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:04Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:04 crc kubenswrapper[5002]: I0930 12:21:04.462298 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:04Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:04 crc kubenswrapper[5002]: I0930 12:21:04.471377 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hvvbg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c3b6a7b0-1c44-43ee-a789-36bb0cd51b87\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cb569d449d2d8d49ee8d62c4815b625481cd5ddd882a135dac7465fe63fbc67e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:21:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ffrqq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4537e7bf982dba689bd81821609884ac5f2530e6efa7d48cd65e05ff64d2ec6b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2
099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:21:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ffrqq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:21:02Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-hvvbg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:04Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:04 crc kubenswrapper[5002]: I0930 12:21:04.478465 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:04 crc kubenswrapper[5002]: I0930 12:21:04.478529 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:04 crc kubenswrapper[5002]: I0930 12:21:04.478540 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:04 crc kubenswrapper[5002]: I0930 12:21:04.478558 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:04 crc kubenswrapper[5002]: I0930 12:21:04.478568 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:04Z","lastTransitionTime":"2025-09-30T12:21:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 12:21:04 crc kubenswrapper[5002]: I0930 12:21:04.495725 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/68756e8a-d882-403f-acd7-2c41fce4446f-metrics-certs\") pod \"network-metrics-daemon-dj2ln\" (UID: \"68756e8a-d882-403f-acd7-2c41fce4446f\") " pod="openshift-multus/network-metrics-daemon-dj2ln" Sep 30 12:21:04 crc kubenswrapper[5002]: I0930 12:21:04.495761 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jdgr9\" (UniqueName: \"kubernetes.io/projected/68756e8a-d882-403f-acd7-2c41fce4446f-kube-api-access-jdgr9\") pod \"network-metrics-daemon-dj2ln\" (UID: \"68756e8a-d882-403f-acd7-2c41fce4446f\") " pod="openshift-multus/network-metrics-daemon-dj2ln" Sep 30 12:21:04 crc kubenswrapper[5002]: I0930 12:21:04.500613 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1d4eefd-8292-45a1-af4b-4a4906cccd2f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21a70de7ac636795f52b3bdd2a2b938494cabcf80e1fb9b321a309f4719edf4e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://240fb1d0e11ae7b3459121ba32094ddfa79dc71a1180befebcaf594859a63d86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod
-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://91de7b5c3b52d03b05c151b66857b5af5f3b9d228f3f1af32a7f504225e1e739\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4cb1aca4e313653c2d83c0d4fb8dfab1e6f8c8220c806cbcdcf154f2d9ab6e29\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ef59c1ec617a61effd91da12779f13866a50fe611d078330f0275e8d6ef1c27e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3a814062d9554255b768b26ad452946c64836707ac0e68c9e3f107dc587c0ccb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3a814062d9554255b768b26ad452946c64836707ac0e68c9e3f107dc587c0ccb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:27Z\\\",\\\"reason\\\":\\\
"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://74db2c969d2f9049fd8e8130a9fd50b312f3fce049ab0e814390dbff3b0cb596\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://74db2c969d2f9049fd8e8130a9fd50b312f3fce049ab0e814390dbff3b0cb596\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://a367821c9b4b8799fb789ca39d487c519925da11ebb80e94a0123e02cf78964e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a367821c9b4b8799fb789ca39d487c519925da11ebb80e94a0123e02cf78964e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:26Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:04Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:04 crc kubenswrapper[5002]: I0930 12:21:04.512184 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-ls6n9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2ebe21b2-eb85-4b30-b911-78f6619d07f9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e83e79da4b7b8f89a61048c7583c9ff7ccb7ac910744dfbb572c189c5676741\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-frsmm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:52Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-ls6n9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:04Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:04 crc kubenswrapper[5002]: I0930 12:21:04.523969 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"90d5183f-90e1-421f-bd64-fd7f05605586\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d2e0f074c0f31a5432cea24faa4f95c276f59cdf48bca8ceecd065b4cdff5d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://902cc950dc462d86c9de62cac5d04e99aa3240952f43e8a5f07884e70ff84b56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://047c48f1e175cddc7dc981a8d9027783f4a90c21cb444050771de50fb02e7a9e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f45ac789dc45e907d68fe54ec7bc2ea72dbcc5f46cd55c113c014203d5e709d9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:04Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:04 crc kubenswrapper[5002]: I0930 12:21:04.535888 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:04Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:04 crc kubenswrapper[5002]: I0930 12:21:04.549217 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-ttfn8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2bd9f18d-bfb3-4bd7-9a87-242029cd3200\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0809a587f4b73c4a5e632e8db2cc0d4a957eded6da9a771cf0b53e4862de54e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mo
untPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lvf2n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-ttfn8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:04Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:04 crc kubenswrapper[5002]: I0930 12:21:04.562435 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"341a55c6-78d3-4fa2-8f47-b56fd41fa1c1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7bc40360d8d17f580c7ded1e33762aa443998e4202892e2f2effc08b6659a4a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbwzb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-
o://6868404a7219f8dbbc4e7541e5f80402c169808daea81b5f94546472cb8e70a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbwzb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:50Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-ncbb5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:04Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:04 crc kubenswrapper[5002]: I0930 12:21:04.580966 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:04 crc kubenswrapper[5002]: I0930 12:21:04.581000 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:04 crc kubenswrapper[5002]: I0930 12:21:04.581008 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:04 crc kubenswrapper[5002]: I0930 12:21:04.581020 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:04 crc kubenswrapper[5002]: I0930 12:21:04.581028 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:04Z","lastTransitionTime":"2025-09-30T12:21:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 12:21:04 crc kubenswrapper[5002]: I0930 12:21:04.584572 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4pvsr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7095aa7a-d067-4977-bdc5-3a45a52a6a39\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b95267743f525eb5c8712304a1dc7afe4403b2065c0a3b594d95dcba6348170\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://89dbc048c1483376e11a6754384e9ea154767a88f7dfb3463809a2554bb142b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://d41b04f928f3b4423bf4aa1babfd6292b749752d31ac57771b36bed503312d3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ceb946c94ca247c05c57001754a73122b1bb98a98d886df9d64b2e4a003a4cf6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfbe7b62a4f0a00896ac4608d70c4ef6dc7675e0312f85ca181fcf1087fe73e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a65c3c1af3f1ee07c2f5350011d1f5eb61dfe0b21ff13d7326373d65ffe603c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8fc2e06caaa38408efd9783fa165bede103d30f2b1d556762d633a3ad3bd3436\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8fc2e06caaa38408efd9783fa165bede103d30f2b1d556762d633a3ad3bd3436\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T12:21:02Z\\\",\\\"message\\\":\\\" stale LBs for map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-multus/multus-admission-controller\\\\\\\"}\\\\nI0930 12:21:01.858370 6485 services_controller.go:360] Finished syncing service multus-admission-controller on namespace openshift-multus for network=default : 12.396713ms\\\\nI0930 12:21:01.858856 6485 obj_retry.go:551] Creating *factory.egressNode crc took: 15.122418ms\\\\nI0930 12:21:01.858901 6485 factory.go:1336] Added *v1.Node event handler 7\\\\nI0930 12:21:01.858970 6485 factory.go:1336] Added *v1.EgressIP event handler 8\\\\nI0930 12:21:01.858989 6485 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0930 12:21:01.859004 6485 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0930 12:21:01.859053 6485 handler.go:208] Removed *v1.Node event handler 2\\\\nI0930 12:21:01.859065 6485 factory.go:656] Stopping watch factory\\\\nI0930 12:21:01.859090 6485 handler.go:208] Removed *v1.Node event handler 7\\\\nI0930 12:21:01.859506 6485 factory.go:1336] Added *v1.EgressFirewall event handler 9\\\\nI0930 12:21:01.859639 6485 controller.go:132] Adding controller ef_node_controller event handlers\\\\nI0930 12:21:01.859673 6485 ovnkube.go:599] Stopped ovnkube\\\\nI0930 12:21:01.859702 6485 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF0930 12:21:01.859768 6485 ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T12:21:01Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed 
container=ovnkube-controller pod=ovnkube-node-4pvsr_openshift-ovn-kubernetes(7095aa7a-d067-4977-bdc5-3a45a52a6a39)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ce6227a7ac3ced1fbd4814039859e25ff52f314aa8479275f1bfc49b04cf411\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29ad21e0858ff157cc785f60f949fb08082938850870cc2c5198447f1e1d9253\\\",\\\"image\\\":\
\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://29ad21e0858ff157cc785f60f949fb08082938850870cc2c5198447f1e1d9253\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:50Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4pvsr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:04Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:04 crc kubenswrapper[5002]: I0930 12:21:04.596838 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/68756e8a-d882-403f-acd7-2c41fce4446f-metrics-certs\") pod \"network-metrics-daemon-dj2ln\" (UID: \"68756e8a-d882-403f-acd7-2c41fce4446f\") " pod="openshift-multus/network-metrics-daemon-dj2ln" Sep 30 12:21:04 crc kubenswrapper[5002]: I0930 12:21:04.596888 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jdgr9\" (UniqueName: \"kubernetes.io/projected/68756e8a-d882-403f-acd7-2c41fce4446f-kube-api-access-jdgr9\") pod \"network-metrics-daemon-dj2ln\" (UID: \"68756e8a-d882-403f-acd7-2c41fce4446f\") " pod="openshift-multus/network-metrics-daemon-dj2ln" Sep 30 12:21:04 crc kubenswrapper[5002]: E0930 12:21:04.596999 5002 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Sep 30 12:21:04 crc kubenswrapper[5002]: E0930 12:21:04.597059 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/68756e8a-d882-403f-acd7-2c41fce4446f-metrics-certs podName:68756e8a-d882-403f-acd7-2c41fce4446f nodeName:}" failed. No retries permitted until 2025-09-30 12:21:05.09704369 +0000 UTC m=+39.346725836 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/68756e8a-d882-403f-acd7-2c41fce4446f-metrics-certs") pod "network-metrics-daemon-dj2ln" (UID: "68756e8a-d882-403f-acd7-2c41fce4446f") : object "openshift-multus"/"metrics-daemon-secret" not registered Sep 30 12:21:04 crc kubenswrapper[5002]: I0930 12:21:04.602495 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://30f55f9a0bcab13420179c62f63cd5e785afe964ac86286450fcd91d7ca0562e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://306dd7840789c5fe2565d819a744a57e330cb1fcc1ab1ca4b3c5ebcfd0361d3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-09-30T12:21:04Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:04 crc kubenswrapper[5002]: I0930 12:21:04.616363 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jdgr9\" (UniqueName: \"kubernetes.io/projected/68756e8a-d882-403f-acd7-2c41fce4446f-kube-api-access-jdgr9\") pod \"network-metrics-daemon-dj2ln\" (UID: \"68756e8a-d882-403f-acd7-2c41fce4446f\") " pod="openshift-multus/network-metrics-daemon-dj2ln" Sep 30 12:21:04 crc kubenswrapper[5002]: I0930 12:21:04.623344 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1fe2fccc-790d-41b7-a322-0f99dbd9b3e2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a46e01897629261ad42fc3fa2cc7fdf56a2a020864a1088d9e58edf61aac296e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://017ebbf00a59d33f36e3515dd280bc0c4363da8b3e621339185ebdb3c8728148\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ee0dc371a43f3780e1178e29ac491c7a61786fccb37527dd2f21d800adbbdae
\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://49c139b2c0ae558d108d47a8452c2a8348fc164761d468e1b9b98ff2c7407f20\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://49c139b2c0ae558d108d47a8452c2a8348fc164761d468e1b9b98ff2c7407f20\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"message\\\":\\\"le observer\\\\nW0930 12:20:46.369838 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0930 12:20:46.370037 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 12:20:46.371113 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1995022772/tls.crt::/tmp/serving-cert-1995022772/tls.key\\\\\\\"\\\\nI0930 12:20:46.549991 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0930 12:20:46.558658 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0930 12:20:46.558692 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0930 12:20:46.558723 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0930 12:20:46.558735 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0930 12:20:46.572632 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0930 12:20:46.572689 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 12:20:46.572707 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 12:20:46.572718 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0930 12:20:46.572725 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0930 12:20:46.572737 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0930 12:20:46.572744 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0930 12:20:46.573045 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0930 12:20:46.574462 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:40Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b99e73bb34b117cdb12e2475fc8e7d0ffdcb7fc44a000282e7776473eca209ea\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://24b1bc36a80f3925846e76f49c3347f1e2f14555096b3a514b06c6eb2e8fe02c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://24b1bc36a80f3925846e76f49c3347f1e2f14555096b3a514b06c6eb2e8fe02c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:04Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:04 crc kubenswrapper[5002]: I0930 12:21:04.640525 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://28d9568103a5257388af7a54e676246ab6ee732abd4516bd06e2cd7471f0ec08\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:04Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:04 crc kubenswrapper[5002]: I0930 12:21:04.651279 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97f2c4d225c4e4f2f04337915618c52ced7280fe45dc735fd9b1aca03f95f7f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:04Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:04 crc kubenswrapper[5002]: I0930 12:21:04.668270 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:04Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:04 crc kubenswrapper[5002]: I0930 12:21:04.676037 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 12:21:04 crc kubenswrapper[5002]: I0930 12:21:04.676195 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 12:21:04 crc kubenswrapper[5002]: E0930 12:21:04.676273 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 12:21:04 crc kubenswrapper[5002]: I0930 12:21:04.676328 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 12:21:04 crc kubenswrapper[5002]: E0930 12:21:04.676453 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 12:21:04 crc kubenswrapper[5002]: E0930 12:21:04.676536 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 12:21:04 crc kubenswrapper[5002]: I0930 12:21:04.681265 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hvvbg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c3b6a7b0-1c44-43ee-a789-36bb0cd51b87\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cb569d449d2d8d49ee8d62c4815b625481cd5ddd882a135dac7465fe63fbc67e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:21:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ffrqq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4537e7bf982dba689bd81821609884ac5f2530e6efa7d48cd65e05ff64d2ec6b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:21:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ffrqq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:21:02Z\\\"}}\" for pod 
\"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-hvvbg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:04Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:04 crc kubenswrapper[5002]: I0930 12:21:04.682686 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:04 crc kubenswrapper[5002]: I0930 12:21:04.682765 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:04 crc kubenswrapper[5002]: I0930 12:21:04.682788 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:04 crc kubenswrapper[5002]: I0930 12:21:04.682818 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:04 crc kubenswrapper[5002]: I0930 12:21:04.682839 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:04Z","lastTransitionTime":"2025-09-30T12:21:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:04 crc kubenswrapper[5002]: I0930 12:21:04.700417 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1d4eefd-8292-45a1-af4b-4a4906cccd2f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21a70de7ac636795f52b3bdd2a2b938494cabcf80e1fb9b321a309f4719edf4e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/et
cd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://240fb1d0e11ae7b3459121ba32094ddfa79dc71a1180befebcaf594859a63d86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://91de7b5c3b52d03b05c151b66857b5af5f3b9d228f3f1af32a7f504225e1e739\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4cb1aca4e313653c2d83c0d4fb8dfab1e6f8c8220c806cbcdcf154f2d9ab6e29\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ef59c1ec617a61effd91da12779f13866a50fe611d078330f0275e8d6ef1c27e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\
\\"cri-o://3a814062d9554255b768b26ad452946c64836707ac0e68c9e3f107dc587c0ccb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3a814062d9554255b768b26ad452946c64836707ac0e68c9e3f107dc587c0ccb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://74db2c969d2f9049fd8e8130a9fd50b312f3fce049ab0e814390dbff3b0cb596\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://74db2c969d2f9049fd8e8130a9fd50b312f3fce049ab0e814390dbff3b0cb596\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://a367821c9b4b8799fb789ca39d487c519925da11ebb80e94a0123e02cf78964e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a367821c9b4b8799fb789ca39d487c519925da11ebb80e94a0123e02cf78964e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:26Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:04Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:04 crc kubenswrapper[5002]: I0930 12:21:04.715539 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-ls6n9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2ebe21b2-eb85-4b30-b911-78f6619d07f9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e83e79da4b7b8f89a61048c7583c9ff7ccb7ac910744dfbb572c189c5676741\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-frsmm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:52Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-ls6n9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:04Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:04 crc kubenswrapper[5002]: I0930 12:21:04.732073 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"90d5183f-90e1-421f-bd64-fd7f05605586\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d2e0f074c0f31a5432cea24faa4f95c276f59cdf48bca8ceecd065b4cdff5d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://902cc950dc462d86c9de62cac5d04e99aa3240952f43e8a5f07884e70ff84b56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://047c48f1e175cddc7dc981a8d9027783f4a90c21cb444050771de50fb02e7a9e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f45ac789dc45e907d68fe54ec7bc2ea72dbcc5f46cd55c113c014203d5e709d9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:04Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:04 crc kubenswrapper[5002]: I0930 12:21:04.748278 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:04Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:04 crc kubenswrapper[5002]: I0930 12:21:04.766961 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-ttfn8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2bd9f18d-bfb3-4bd7-9a87-242029cd3200\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0809a587f4b73c4a5e632e8db2cc0d4a957eded6da9a771cf0b53e4862de54e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mo
untPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lvf2n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-ttfn8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:04Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:04 crc kubenswrapper[5002]: I0930 12:21:04.780311 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"341a55c6-78d3-4fa2-8f47-b56fd41fa1c1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7bc40360d8d17f580c7ded1e33762aa443998e4202892e2f2effc08b6659a4a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbwzb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-
o://6868404a7219f8dbbc4e7541e5f80402c169808daea81b5f94546472cb8e70a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbwzb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:50Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-ncbb5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:04Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:04 crc kubenswrapper[5002]: I0930 12:21:04.785527 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:04 crc kubenswrapper[5002]: I0930 12:21:04.785577 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:04 crc kubenswrapper[5002]: I0930 12:21:04.785592 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:04 crc kubenswrapper[5002]: I0930 12:21:04.785612 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:04 crc kubenswrapper[5002]: I0930 12:21:04.785624 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:04Z","lastTransitionTime":"2025-09-30T12:21:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 12:21:04 crc kubenswrapper[5002]: I0930 12:21:04.798257 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4pvsr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7095aa7a-d067-4977-bdc5-3a45a52a6a39\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b95267743f525eb5c8712304a1dc7afe4403b2065c0a3b594d95dcba6348170\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://89dbc048c1483376e11a6754384e9ea154767a88f7dfb3463809a2554bb142b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://d41b04f928f3b4423bf4aa1babfd6292b749752d31ac57771b36bed503312d3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ceb946c94ca247c05c57001754a73122b1bb98a98d886df9d64b2e4a003a4cf6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfbe7b62a4f0a00896ac4608d70c4ef6dc7675e0312f85ca181fcf1087fe73e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a65c3c1af3f1ee07c2f5350011d1f5eb61dfe0b21ff13d7326373d65ffe603c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8fc2e06caaa38408efd9783fa165bede103d30f2b1d556762d633a3ad3bd3436\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8fc2e06caaa38408efd9783fa165bede103d30f2b1d556762d633a3ad3bd3436\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T12:21:02Z\\\",\\\"message\\\":\\\" stale LBs for map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-multus/multus-admission-controller\\\\\\\"}\\\\nI0930 12:21:01.858370 6485 services_controller.go:360] Finished syncing service multus-admission-controller on namespace openshift-multus for network=default : 12.396713ms\\\\nI0930 12:21:01.858856 6485 obj_retry.go:551] Creating *factory.egressNode crc took: 15.122418ms\\\\nI0930 12:21:01.858901 6485 factory.go:1336] Added *v1.Node event handler 7\\\\nI0930 12:21:01.858970 6485 factory.go:1336] Added *v1.EgressIP event handler 8\\\\nI0930 12:21:01.858989 6485 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0930 12:21:01.859004 6485 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0930 12:21:01.859053 6485 handler.go:208] Removed *v1.Node event handler 2\\\\nI0930 12:21:01.859065 6485 factory.go:656] Stopping watch factory\\\\nI0930 12:21:01.859090 6485 handler.go:208] Removed *v1.Node event handler 7\\\\nI0930 12:21:01.859506 6485 factory.go:1336] Added *v1.EgressFirewall event handler 9\\\\nI0930 12:21:01.859639 6485 controller.go:132] Adding controller ef_node_controller event handlers\\\\nI0930 12:21:01.859673 6485 ovnkube.go:599] Stopped ovnkube\\\\nI0930 12:21:01.859702 6485 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF0930 12:21:01.859768 6485 ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T12:21:01Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed 
container=ovnkube-controller pod=ovnkube-node-4pvsr_openshift-ovn-kubernetes(7095aa7a-d067-4977-bdc5-3a45a52a6a39)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ce6227a7ac3ced1fbd4814039859e25ff52f314aa8479275f1bfc49b04cf411\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29ad21e0858ff157cc785f60f949fb08082938850870cc2c5198447f1e1d9253\\\",\\\"image\\\":\
\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://29ad21e0858ff157cc785f60f949fb08082938850870cc2c5198447f1e1d9253\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:50Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4pvsr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:04Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:04 crc kubenswrapper[5002]: I0930 12:21:04.809080 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://30f55f9a0bcab13420179c62f63cd5e785afe964ac86286450fcd91d7ca0562e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://306dd7840789c5fe2565d819a744a57e330cb1fcc1ab1ca4b3c5ebcfd0361d3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d20
99482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:04Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:04 crc kubenswrapper[5002]: I0930 12:21:04.822016 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:04Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:04 crc kubenswrapper[5002]: I0930 12:21:04.834301 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-p45pn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d0334cb8-cf38-4e50-80e8-2b81d8d46ae7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca035599120eed38b0abc1893076e4d1ecab6b1ce53605f53ff30fa2782b63de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2nvm4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:49Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-p45pn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-09-30T12:21:04Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:04 crc kubenswrapper[5002]: I0930 12:21:04.857124 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-2lqq2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7b7df259-4156-484c-bedd-543ca42f2970\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3e10fed923eacb0c5409835ec689ec74b4ef68b770774b3fe03e05db05d63c04\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6865c45580d6dc6acaf6dede9483213728b45f602a6df3a6d9264bbec520db66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6865c45580d6dc6acaf6dede9483213728b45f602a6df3a6d9264bbec520db66\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://042d2758098735539d73ee03bcaf5bfaaa765ef2337975341c
63511bff33e4ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://042d2758098735539d73ee03bcaf5bfaaa765ef2337975341c63511bff33e4ec\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://037e6837e485e2662c487cea7b28bd28b85c3aa2dc698edd3d16b4269907dd83\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://037e6837e485e2662c487cea7b28bd28b85c3aa2dc698edd3d16b4269907dd83\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2a451f8cf7baf508ad6ceda7f19f3117804e5698e678bedf8e4187f60847aaac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2a451f8cf7baf508ad6ceda7f19f3117804e5698e678bedf8e4187f60847aaac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-
copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://edf6916be3680c65376e28fb893a869599eb07d870ac5337a38fed83b4dbf28d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://edf6916be3680c65376e28fb893a869599eb07d870ac5337a38fed83b4dbf28d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7019df241ab90e98201c474287f02919d99bca60c432a8f61ac68d29ce53bed8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7019df241ab90e98201c474287f02919d99bca60c432a8f61ac68d29ce53bed8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-2lqq2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:04Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:04 crc kubenswrapper[5002]: I0930 12:21:04.870816 5002 status_manager.go:875] "Failed to update status for 
pod" pod="openshift-multus/network-metrics-daemon-dj2ln" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"68756e8a-d882-403f-acd7-2c41fce4446f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jdgr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jdgr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:21:04Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-dj2ln\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:04Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:04 crc kubenswrapper[5002]: I0930 12:21:04.887974 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:04 crc kubenswrapper[5002]: I0930 12:21:04.888008 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:04 crc kubenswrapper[5002]: I0930 12:21:04.888019 
5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:04 crc kubenswrapper[5002]: I0930 12:21:04.888034 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:04 crc kubenswrapper[5002]: I0930 12:21:04.888043 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:04Z","lastTransitionTime":"2025-09-30T12:21:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:04 crc kubenswrapper[5002]: I0930 12:21:04.990624 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:04 crc kubenswrapper[5002]: I0930 12:21:04.990710 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:04 crc kubenswrapper[5002]: I0930 12:21:04.990730 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:04 crc kubenswrapper[5002]: I0930 12:21:04.990755 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:04 crc kubenswrapper[5002]: I0930 12:21:04.990772 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:04Z","lastTransitionTime":"2025-09-30T12:21:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:05 crc kubenswrapper[5002]: I0930 12:21:05.093986 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:05 crc kubenswrapper[5002]: I0930 12:21:05.094043 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:05 crc kubenswrapper[5002]: I0930 12:21:05.094062 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:05 crc kubenswrapper[5002]: I0930 12:21:05.094085 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:05 crc kubenswrapper[5002]: I0930 12:21:05.094102 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:05Z","lastTransitionTime":"2025-09-30T12:21:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 12:21:05 crc kubenswrapper[5002]: I0930 12:21:05.102805 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/68756e8a-d882-403f-acd7-2c41fce4446f-metrics-certs\") pod \"network-metrics-daemon-dj2ln\" (UID: \"68756e8a-d882-403f-acd7-2c41fce4446f\") " pod="openshift-multus/network-metrics-daemon-dj2ln" Sep 30 12:21:05 crc kubenswrapper[5002]: E0930 12:21:05.103016 5002 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Sep 30 12:21:05 crc kubenswrapper[5002]: E0930 12:21:05.103103 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/68756e8a-d882-403f-acd7-2c41fce4446f-metrics-certs podName:68756e8a-d882-403f-acd7-2c41fce4446f nodeName:}" failed. No retries permitted until 2025-09-30 12:21:06.103079648 +0000 UTC m=+40.352761834 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/68756e8a-d882-403f-acd7-2c41fce4446f-metrics-certs") pod "network-metrics-daemon-dj2ln" (UID: "68756e8a-d882-403f-acd7-2c41fce4446f") : object "openshift-multus"/"metrics-daemon-secret" not registered Sep 30 12:21:05 crc kubenswrapper[5002]: I0930 12:21:05.196831 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:05 crc kubenswrapper[5002]: I0930 12:21:05.196888 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:05 crc kubenswrapper[5002]: I0930 12:21:05.196905 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:05 crc kubenswrapper[5002]: I0930 12:21:05.196928 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:05 crc kubenswrapper[5002]: I0930 12:21:05.196946 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:05Z","lastTransitionTime":"2025-09-30T12:21:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 12:21:05 crc kubenswrapper[5002]: I0930 12:21:05.299869 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:05 crc kubenswrapper[5002]: I0930 12:21:05.299925 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:05 crc kubenswrapper[5002]: I0930 12:21:05.299945 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:05 crc kubenswrapper[5002]: I0930 12:21:05.299972 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:05 crc kubenswrapper[5002]: I0930 12:21:05.299991 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:05Z","lastTransitionTime":"2025-09-30T12:21:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:05 crc kubenswrapper[5002]: I0930 12:21:05.402993 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:05 crc kubenswrapper[5002]: I0930 12:21:05.403403 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:05 crc kubenswrapper[5002]: I0930 12:21:05.403433 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:05 crc kubenswrapper[5002]: I0930 12:21:05.403451 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:05 crc kubenswrapper[5002]: I0930 12:21:05.403462 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:05Z","lastTransitionTime":"2025-09-30T12:21:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:05 crc kubenswrapper[5002]: I0930 12:21:05.507360 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:05 crc kubenswrapper[5002]: I0930 12:21:05.507433 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:05 crc kubenswrapper[5002]: I0930 12:21:05.507453 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:05 crc kubenswrapper[5002]: I0930 12:21:05.507506 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:05 crc kubenswrapper[5002]: I0930 12:21:05.507525 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:05Z","lastTransitionTime":"2025-09-30T12:21:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 12:21:05 crc kubenswrapper[5002]: I0930 12:21:05.614286 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:05 crc kubenswrapper[5002]: I0930 12:21:05.614348 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:05 crc kubenswrapper[5002]: I0930 12:21:05.614364 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:05 crc kubenswrapper[5002]: I0930 12:21:05.614387 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:05 crc kubenswrapper[5002]: I0930 12:21:05.614408 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:05Z","lastTransitionTime":"2025-09-30T12:21:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:05 crc kubenswrapper[5002]: I0930 12:21:05.675965 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-dj2ln" Sep 30 12:21:05 crc kubenswrapper[5002]: E0930 12:21:05.676209 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-dj2ln" podUID="68756e8a-d882-403f-acd7-2c41fce4446f" Sep 30 12:21:05 crc kubenswrapper[5002]: I0930 12:21:05.677211 5002 scope.go:117] "RemoveContainer" containerID="49c139b2c0ae558d108d47a8452c2a8348fc164761d468e1b9b98ff2c7407f20" Sep 30 12:21:05 crc kubenswrapper[5002]: I0930 12:21:05.717570 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:05 crc kubenswrapper[5002]: I0930 12:21:05.717853 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:05 crc kubenswrapper[5002]: I0930 12:21:05.718001 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:05 crc kubenswrapper[5002]: I0930 12:21:05.718190 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:05 crc kubenswrapper[5002]: I0930 12:21:05.718457 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:05Z","lastTransitionTime":"2025-09-30T12:21:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 12:21:05 crc kubenswrapper[5002]: I0930 12:21:05.820612 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:05 crc kubenswrapper[5002]: I0930 12:21:05.820675 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:05 crc kubenswrapper[5002]: I0930 12:21:05.820687 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:05 crc kubenswrapper[5002]: I0930 12:21:05.820703 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:05 crc kubenswrapper[5002]: I0930 12:21:05.820714 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:05Z","lastTransitionTime":"2025-09-30T12:21:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:05 crc kubenswrapper[5002]: I0930 12:21:05.924139 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:05 crc kubenswrapper[5002]: I0930 12:21:05.924185 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:05 crc kubenswrapper[5002]: I0930 12:21:05.924200 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:05 crc kubenswrapper[5002]: I0930 12:21:05.924221 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:05 crc kubenswrapper[5002]: I0930 12:21:05.924235 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:05Z","lastTransitionTime":"2025-09-30T12:21:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:06 crc kubenswrapper[5002]: I0930 12:21:06.027304 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:06 crc kubenswrapper[5002]: I0930 12:21:06.027361 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:06 crc kubenswrapper[5002]: I0930 12:21:06.027378 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:06 crc kubenswrapper[5002]: I0930 12:21:06.027402 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:06 crc kubenswrapper[5002]: I0930 12:21:06.027419 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:06Z","lastTransitionTime":"2025-09-30T12:21:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 12:21:06 crc kubenswrapper[5002]: I0930 12:21:06.112874 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/68756e8a-d882-403f-acd7-2c41fce4446f-metrics-certs\") pod \"network-metrics-daemon-dj2ln\" (UID: \"68756e8a-d882-403f-acd7-2c41fce4446f\") " pod="openshift-multus/network-metrics-daemon-dj2ln" Sep 30 12:21:06 crc kubenswrapper[5002]: E0930 12:21:06.113081 5002 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Sep 30 12:21:06 crc kubenswrapper[5002]: E0930 12:21:06.113179 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/68756e8a-d882-403f-acd7-2c41fce4446f-metrics-certs podName:68756e8a-d882-403f-acd7-2c41fce4446f nodeName:}" failed. No retries permitted until 2025-09-30 12:21:08.113153412 +0000 UTC m=+42.362835588 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/68756e8a-d882-403f-acd7-2c41fce4446f-metrics-certs") pod "network-metrics-daemon-dj2ln" (UID: "68756e8a-d882-403f-acd7-2c41fce4446f") : object "openshift-multus"/"metrics-daemon-secret" not registered Sep 30 12:21:06 crc kubenswrapper[5002]: I0930 12:21:06.118398 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/1.log" Sep 30 12:21:06 crc kubenswrapper[5002]: I0930 12:21:06.120734 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"ba33674850f0b121ea405e37924fcc085b3a3625e87432d9df38b2420920bf45"} Sep 30 12:21:06 crc kubenswrapper[5002]: I0930 12:21:06.121223 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 30 12:21:06 crc kubenswrapper[5002]: I0930 12:21:06.130026 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:06 crc kubenswrapper[5002]: I0930 12:21:06.130095 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:06 crc kubenswrapper[5002]: I0930 12:21:06.130119 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:06 crc kubenswrapper[5002]: I0930 12:21:06.130141 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:06 crc kubenswrapper[5002]: I0930 12:21:06.130174 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:06Z","lastTransitionTime":"2025-09-30T12:21:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 12:21:06 crc kubenswrapper[5002]: I0930 12:21:06.141057 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"341a55c6-78d3-4fa2-8f47-b56fd41fa1c1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7bc40360d8d17f580c7ded1e33762aa443998e4202892e2f2effc08b6659a4a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbwzb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6868404a7219f8dbbc4e7541e5f80402c169808daea81b5f94546472cb8e70a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbwzb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:50Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-ncbb5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:06Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:06 crc kubenswrapper[5002]: I0930 12:21:06.170580 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4pvsr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7095aa7a-d067-4977-bdc5-3a45a52a6a39\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b95267743f525eb5c8712304a1dc7afe4403b2065c0a3b594d95dcba6348170\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://89dbc048c1483376e11a6754384e9ea154767a88f7dfb3463809a2554bb142b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kuber
netes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d41b04f928f3b4423bf4aa1babfd6292b749752d31ac57771b36bed503312d3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ceb946c94ca247c05c57001754a73122b1bb98a98d886df9d64b2e4a003a4cf6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfbe7b62a4f0a00896ac4608d70c4ef6dc7675e0312f85ca181fcf1087fe73e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a65c3c1af3f1ee07c2f5350011d1f5eb61dfe0b21ff13d7326373d65ffe603c9\\\",\\\"image
\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8fc2e06caaa38408efd9783fa165bede103d30f2b1d556762d633a3ad3bd3436\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8fc2e06caaa38408efd9783fa165bede103d30f2b1d556762d633a3ad3bd3436\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T12:21:02Z\\\",\\\"message\\\":\\\" stale LBs for map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-multus/multus-admission-controller\\\\\\\"}\\\\nI0930 12:21:01.858370 6485 services_controller.go:360] Finished syncing service multus-admission-controller on namespace openshift-multus for network=default : 12.396713ms\\\\nI0930 12:21:01.858856 6485 obj_retry.go:551] Creating *factory.egressNode crc took: 15.122418ms\\\\nI0930 12:21:01.858901 6485 factory.go:1336] Added *v1.Node event handler 7\\\\nI0930 12:21:01.858970 6485 factory.go:1336] Added *v1.EgressIP event handler 8\\\\nI0930 12:21:01.858989 6485 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0930 12:21:01.859004 6485 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0930 12:21:01.859053 6485 handler.go:208] Removed *v1.Node event handler 2\\\\nI0930 12:21:01.859065 6485 factory.go:656] Stopping watch factory\\\\nI0930 12:21:01.859090 6485 handler.go:208] Removed *v1.Node event handler 7\\\\nI0930 12:21:01.859506 6485 factory.go:1336] Added *v1.EgressFirewall event handler 9\\\\nI0930 12:21:01.859639 6485 controller.go:132] Adding controller ef_node_controller event handlers\\\\nI0930 12:21:01.859673 6485 ovnkube.go:599] Stopped ovnkube\\\\nI0930 12:21:01.859702 6485 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF0930 12:21:01.859768 6485 
ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T12:21:01Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-4pvsr_openshift-ovn-kubernetes(7095aa7a-d067-4977-bdc5-3a45a52a6a39)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ce6227a7ac3ced1fbd4814039859e25ff52f314aa8479275f1bfc49b04cf411\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"r
ecursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29ad21e0858ff157cc785f60f949fb08082938850870cc2c5198447f1e1d9253\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://29ad21e0858ff157cc785f60f949fb08082938850870cc2c5198447f1e1d9253\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:50Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4pvsr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:06Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:06 crc kubenswrapper[5002]: I0930 12:21:06.190528 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://30f55f9a0bcab13420179c62f63cd5e785afe964ac86286450fcd91d7ca0562e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://306dd7840789c5fe2565d819a744a57e330cb1fcc1ab1ca4b3c5ebcfd0361d3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:06Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:06 crc kubenswrapper[5002]: I0930 12:21:06.211045 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:06Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:06 crc kubenswrapper[5002]: I0930 12:21:06.231267 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-ttfn8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2bd9f18d-bfb3-4bd7-9a87-242029cd3200\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0809a587f4b73c4a5e632e8db2cc0d4a957eded6da9a771cf0b53e4862de54e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lvf2n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-ttfn8\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:06Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:06 crc kubenswrapper[5002]: I0930 12:21:06.233447 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:06 crc kubenswrapper[5002]: I0930 12:21:06.233516 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:06 crc kubenswrapper[5002]: I0930 12:21:06.233530 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:06 crc kubenswrapper[5002]: I0930 12:21:06.233546 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:06 crc kubenswrapper[5002]: I0930 12:21:06.233559 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:06Z","lastTransitionTime":"2025-09-30T12:21:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:06 crc kubenswrapper[5002]: I0930 12:21:06.246899 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-p45pn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d0334cb8-cf38-4e50-80e8-2b81d8d46ae7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca035599120eed38b0abc1893076e4d1ecab6b1ce53605f53ff30fa2782b63de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2nvm4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.16
8.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:49Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-p45pn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:06Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:06 crc kubenswrapper[5002]: I0930 12:21:06.263902 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-2lqq2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7b7df259-4156-484c-bedd-543ca42f2970\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3e10fed923eacb0c5409835ec689ec74b4ef68b770774b3fe03e05db05d63c04\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6865c45580d6dc6acaf6dede9483213728b45f602a6df3a6d9264bbec520db66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6865c45580d6dc6acaf6dede9483213728b45f602a6df3a6d9264bbec520db66\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-bin
ary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://042d2758098735539d73ee03bcaf5bfaaa765ef2337975341c63511bff33e4ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://042d2758098735539d73ee03bcaf5bfaaa765ef2337975341c63511bff33e4ec\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://037e6837e485e2662c487cea7b28bd28b85c3aa2dc698edd3d16b4269907dd83\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://037e6837e485e2662c487cea7b28bd28b85c3aa2dc698edd3d16b4269907dd83\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2a451f8cf7baf508ad6ceda7f19f3117804e5698e678bedf8e4187f60847aaac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\
\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2a451f8cf7baf508ad6ceda7f19f3117804e5698e678bedf8e4187f60847aaac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://edf6916be3680c65376e28fb893a869599eb07d870ac5337a38fed83b4dbf28d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://edf6916be3680c65376e28fb893a869599eb07d870ac5337a38fed83b4dbf28d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7019df241ab90e98201c474287f02919d99bca60c432a8f61ac68d29ce53bed8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7019df241ab90e98201c474287f02919d99bca60c432a8f61ac68d29ce53bed8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:50Z\\\"}}\" for 
pod \"openshift-multus\"/\"multus-additional-cni-plugins-2lqq2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:06Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:06 crc kubenswrapper[5002]: I0930 12:21:06.274599 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-dj2ln" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"68756e8a-d882-403f-acd7-2c41fce4446f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jdgr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jdgr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:21:04Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-dj2ln\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2025-09-30T12:21:06Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:06 crc kubenswrapper[5002]: I0930 12:21:06.287261 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:06Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:06 crc kubenswrapper[5002]: I0930 12:21:06.297200 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97f2c4d225c4e4f2f04337915618c52ced7280fe45dc735fd9b1aca03f95f7f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:06Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:06 crc kubenswrapper[5002]: I0930 12:21:06.308487 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:06Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:06 crc kubenswrapper[5002]: I0930 12:21:06.319114 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hvvbg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c3b6a7b0-1c44-43ee-a789-36bb0cd51b87\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cb569d449d2d8d49ee8d62c4815b625481cd5ddd882a135dac7465fe63fbc67e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:21:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ffrqq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4537e7bf982dba689bd81821609884ac5f2530e6efa7d48cd65e05ff64d2ec6b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2
099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:21:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ffrqq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:21:02Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-hvvbg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:06Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:06 crc kubenswrapper[5002]: I0930 12:21:06.336076 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:06 crc kubenswrapper[5002]: I0930 12:21:06.336117 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:06 crc kubenswrapper[5002]: I0930 12:21:06.336128 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:06 crc kubenswrapper[5002]: I0930 12:21:06.336146 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:06 crc kubenswrapper[5002]: I0930 12:21:06.336159 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:06Z","lastTransitionTime":"2025-09-30T12:21:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 12:21:06 crc kubenswrapper[5002]: I0930 12:21:06.336136 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1d4eefd-8292-45a1-af4b-4a4906cccd2f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21a70de7ac636795f52b3bdd2a2b938494cabcf80e1fb9b321a309f4719edf4e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://240fb1d0e11ae7b3459121ba32094ddfa79dc71a1180befebcaf594859a63d86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://91de7b5c3b52d03b05c151b66857b5af5f3b9d228f3f1af32a7f504225e1e739\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4cb1aca4e313653c2d83c0d4fb8dfab1e6f8c8220c806cbcdcf154f2d9ab6e29\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ef59c1ec617a61effd91da12779f13866a50fe611d078330f0275e8d6ef1c27e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3a814062d9554255b768b26ad452946c64836707ac0e68c9e3f107dc587c0ccb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3a814062d9554255b768b26ad452946c64836707ac0e68c9e3f107dc587c0ccb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://74db2c969d2f9049fd8e8130a9fd50b312f3fce049ab0e814390dbff3b0cb596\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://74db2c969d2f9049fd8e8130a9fd50b312f3fce049ab0e814390dbff3b0cb596\\\",\\\"exitCode\\\":0,\\\"finished
At\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://a367821c9b4b8799fb789ca39d487c519925da11ebb80e94a0123e02cf78964e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a367821c9b4b8799fb789ca39d487c519925da11ebb80e94a0123e02cf78964e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:26Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:06Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:06 crc kubenswrapper[5002]: I0930 12:21:06.349724 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1fe2fccc-790d-41b7-a322-0f99dbd9b3e2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a46e01897629261ad42fc3fa2cc7fdf56a2a020864a1088d9e58edf61aac296e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://017ebbf00a59d33f36e3515dd280bc0c4363da8b3e621339185ebdb3c8728148\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ee0dc371a43f3780e1178e29ac491c7a61786fccb37527dd2f21d800adbbdae\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ba33674850f0b121ea405e37924fcc085b3a3625e87432d9df38b2420920bf45\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://49c139b2c0ae558d108d47a8452c2a8348fc164761d468e1b9b98ff2c7407f20\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"message\\\":\\\"le observer\\\\nW0930 12:20:46.369838 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0930 12:20:46.370037 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 12:20:46.371113 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1995022772/tls.crt::/tmp/serving-cert-1995022772/tls.key\\\\\\\"\\\\nI0930 12:20:46.549991 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0930 12:20:46.558658 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0930 12:20:46.558692 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0930 12:20:46.558723 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0930 12:20:46.558735 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0930 12:20:46.572632 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0930 12:20:46.572689 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 12:20:46.572707 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 12:20:46.572718 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0930 12:20:46.572725 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0930 12:20:46.572737 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0930 12:20:46.572744 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0930 12:20:46.573045 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0930 12:20:46.574462 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:40Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:21:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b99e73bb34b117cdb12e2475fc8e7d0ffdcb7fc44a000282e7776473eca209ea\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://24b1bc36a80f3925846e76f49c3347f1e2f14555096b3a514b06c6eb2e8fe02c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://24b1bc36a80f3925846e76f49c3347f1e2f14555096b3a514b06c6eb2e8fe02c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:06Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:06 crc kubenswrapper[5002]: I0930 12:21:06.366734 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://28d9568103a5257388af7a54e676246ab6ee732abd4516bd06e2cd7471f0ec08\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:06Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:06 crc kubenswrapper[5002]: I0930 12:21:06.378858 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"90d5183f-90e1-421f-bd64-fd7f05605586\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d2e0f074c0f31a5432cea24faa4f95c276f59cdf48bca8ceecd065b4cdff5d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://902cc950dc462d86c9de62cac5d04e99aa3240952f43e8a5f07884e70ff84b56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://047c48f1e175cddc7dc981a8d9027783f4a90c21cb444050771de50fb02e7a9e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f45ac789dc45e907d68fe54ec7bc2ea72dbcc5f46cd55c113c014203d5e709d9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:06Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:06 crc kubenswrapper[5002]: I0930 12:21:06.389231 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-ls6n9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2ebe21b2-eb85-4b30-b911-78f6619d07f9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e83e79da4b7b8f89a61048c7583c9ff7ccb7ac910744dfbb572c189c5676741\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-frsmm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase
\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:52Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-ls6n9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:06Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:06 crc kubenswrapper[5002]: I0930 12:21:06.438540 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:06 crc kubenswrapper[5002]: I0930 12:21:06.438593 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:06 crc kubenswrapper[5002]: I0930 12:21:06.438606 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:06 crc kubenswrapper[5002]: I0930 12:21:06.438622 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:06 crc kubenswrapper[5002]: I0930 12:21:06.438633 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:06Z","lastTransitionTime":"2025-09-30T12:21:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:06 crc kubenswrapper[5002]: I0930 12:21:06.541023 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:06 crc kubenswrapper[5002]: I0930 12:21:06.541059 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:06 crc kubenswrapper[5002]: I0930 12:21:06.541067 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:06 crc kubenswrapper[5002]: I0930 12:21:06.541079 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:06 crc kubenswrapper[5002]: I0930 12:21:06.541090 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:06Z","lastTransitionTime":"2025-09-30T12:21:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 12:21:06 crc kubenswrapper[5002]: I0930 12:21:06.644337 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:06 crc kubenswrapper[5002]: I0930 12:21:06.644373 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:06 crc kubenswrapper[5002]: I0930 12:21:06.644381 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:06 crc kubenswrapper[5002]: I0930 12:21:06.644394 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:06 crc kubenswrapper[5002]: I0930 12:21:06.644403 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:06Z","lastTransitionTime":"2025-09-30T12:21:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:06 crc kubenswrapper[5002]: I0930 12:21:06.675947 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 12:21:06 crc kubenswrapper[5002]: I0930 12:21:06.676034 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 12:21:06 crc kubenswrapper[5002]: E0930 12:21:06.676128 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 12:21:06 crc kubenswrapper[5002]: I0930 12:21:06.676185 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 12:21:06 crc kubenswrapper[5002]: E0930 12:21:06.676319 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 12:21:06 crc kubenswrapper[5002]: E0930 12:21:06.676521 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 12:21:06 crc kubenswrapper[5002]: I0930 12:21:06.710254 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1d4eefd-8292-45a1-af4b-4a4906cccd2f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21a70de7ac636795f52b3bdd2a2b938494cabcf80e1fb9b321a309f4719edf4e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://240fb1d0e11ae7b3459121ba32094ddfa79dc71a1180befebcaf594859a63d86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://91de7b5c3b52d03b05c151b66857b5af5f3b9d228f3f1af32a7f504225e1e739\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"
startedAt\\\":\\\"2025-09-30T12:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4cb1aca4e313653c2d83c0d4fb8dfab1e6f8c8220c806cbcdcf154f2d9ab6e29\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ef59c1ec617a61effd91da12779f13866a50fe611d078330f0275e8d6ef1c27e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3a814062d9554255b768b26ad452946c64836707ac0e68c9e3f107dc587c0ccb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3a814062d9554255b768b26ad452946c64836707ac0e68c9e3f107dc587c0ccb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://74db2c969d2f9049fd8e8130a9fd50b312f3fce049ab0e814390dbff3b0cb596\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://74db2c969d2f9049
fd8e8130a9fd50b312f3fce049ab0e814390dbff3b0cb596\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://a367821c9b4b8799fb789ca39d487c519925da11ebb80e94a0123e02cf78964e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a367821c9b4b8799fb789ca39d487c519925da11ebb80e94a0123e02cf78964e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:26Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:06Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:06 crc kubenswrapper[5002]: I0930 12:21:06.728142 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1fe2fccc-790d-41b7-a322-0f99dbd9b3e2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a46e01897629261ad42fc3fa2cc7fdf56a2a020864a1088d9e58edf61aac296e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://017ebbf00a59d33f36e3515dd280bc0c4363da8b3e621339185ebdb3c8728148\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ee0dc371a43f3780e1178e29ac491c7a61786fccb37527dd2f21d800adbbdae\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ba33674850f0b121ea405e37924fcc085b3a3625e87432d9df38b2420920bf45\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://49c139b2c0ae558d108d47a8452c2a8348fc164761d468e1b9b98ff2c7407f20\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"message\\\":\\\"le observer\\\\nW0930 12:20:46.369838 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0930 12:20:46.370037 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 12:20:46.371113 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1995022772/tls.crt::/tmp/serving-cert-1995022772/tls.key\\\\\\\"\\\\nI0930 12:20:46.549991 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0930 12:20:46.558658 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0930 12:20:46.558692 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0930 12:20:46.558723 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0930 12:20:46.558735 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0930 12:20:46.572632 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0930 12:20:46.572689 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 12:20:46.572707 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 12:20:46.572718 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0930 12:20:46.572725 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0930 12:20:46.572737 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0930 12:20:46.572744 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0930 12:20:46.573045 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0930 12:20:46.574462 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:40Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:21:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b99e73bb34b117cdb12e2475fc8e7d0ffdcb7fc44a000282e7776473eca209ea\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://24b1bc36a80f3925846e76f49c3347f1e2f14555096b3a514b06c6eb2e8fe02c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://24b1bc36a80f3925846e76f49c3347f1e2f14555096b3a514b06c6eb2e8fe02c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:06Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:06 crc kubenswrapper[5002]: I0930 12:21:06.739544 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://28d9568103a5257388af7a54e676246ab6ee732abd4516bd06e2cd7471f0ec08\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:06Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:06 crc kubenswrapper[5002]: I0930 12:21:06.745849 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:06 crc kubenswrapper[5002]: I0930 12:21:06.745889 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:06 crc kubenswrapper[5002]: I0930 12:21:06.745901 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:06 crc kubenswrapper[5002]: I0930 12:21:06.745944 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:06 crc kubenswrapper[5002]: I0930 12:21:06.745953 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:06Z","lastTransitionTime":"2025-09-30T12:21:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 12:21:06 crc kubenswrapper[5002]: I0930 12:21:06.753072 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97f2c4d225c4e4f2f04337915618c52ced7280fe45dc735fd9b1aca03f95f7f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:06Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:06 crc kubenswrapper[5002]: I0930 12:21:06.766553 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:06Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:06 crc kubenswrapper[5002]: I0930 12:21:06.779237 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hvvbg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c3b6a7b0-1c44-43ee-a789-36bb0cd51b87\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cb569d449d2d8d49ee8d62c4815b625481cd5ddd882a135dac7465fe63fbc67e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:21:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ffrqq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4537e7bf982dba689bd81821609884ac5f2530e6efa7d48cd65e05ff64d2ec6b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:21:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ffrqq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:21:02Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-hvvbg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:06Z is after 2025-08-24T17:21:41Z" Sep 30 
12:21:06 crc kubenswrapper[5002]: I0930 12:21:06.791875 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"90d5183f-90e1-421f-bd64-fd7f05605586\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d2e0f074c0f31a5432cea24faa4f95c276f59cdf48bca8ceecd065b4cdff5d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://902cc950dc462d86c9de62cac5d04e99aa3240952f43e8a5f07884e70ff84b56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://047c48f1e175cddc7dc981a8d9027783f4a90c21cb444050771de50fb02e7a9e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\
",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f45ac789dc45e907d68fe54ec7bc2ea72dbcc5f46cd55c113c014203d5e709d9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:06Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:06 crc kubenswrapper[5002]: I0930 12:21:06.802274 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-ls6n9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2ebe21b2-eb85-4b30-b911-78f6619d07f9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e83e79da4b7b8f89a61048c7583c9ff7ccb7ac910744dfbb572c189c5676741\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\"
,\\\"name\\\":\\\"kube-api-access-frsmm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:52Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-ls6n9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:06Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:06 crc kubenswrapper[5002]: I0930 12:21:06.815237 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://30f55f9a0bcab13420179c62f63cd5e785afe964ac86286450fcd91d7ca0562e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://306dd7840789c5fe2565d819a744a57e330cb1fcc1ab1ca4b3c5ebcfd0361d3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:06Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:06 crc kubenswrapper[5002]: I0930 12:21:06.828387 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:06Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:06 crc kubenswrapper[5002]: I0930 12:21:06.840415 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-ttfn8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2bd9f18d-bfb3-4bd7-9a87-242029cd3200\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0809a587f4b73c4a5e632e8db2cc0d4a957eded6da9a771cf0b53e4862de54e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mo
untPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lvf2n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-ttfn8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:06Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:06 crc kubenswrapper[5002]: I0930 12:21:06.849113 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:06 crc kubenswrapper[5002]: I0930 12:21:06.849166 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:06 crc kubenswrapper[5002]: I0930 12:21:06.849184 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:06 crc kubenswrapper[5002]: I0930 12:21:06.849212 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:06 crc kubenswrapper[5002]: I0930 12:21:06.849230 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:06Z","lastTransitionTime":"2025-09-30T12:21:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 12:21:06 crc kubenswrapper[5002]: I0930 12:21:06.852465 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"341a55c6-78d3-4fa2-8f47-b56fd41fa1c1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7bc40360d8d17f580c7ded1e33762aa443998e4202892e2f2effc08b6659a4a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbwzb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6868404a7219f8dbbc4e7541e5f80402c169808daea81b5f94546472cb8e70a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbwzb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:50Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-ncbb5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:06Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:06 crc kubenswrapper[5002]: I0930 12:21:06.871508 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4pvsr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7095aa7a-d067-4977-bdc5-3a45a52a6a39\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b95267743f525eb5c8712304a1dc7afe4403b2065c0a3b594d95dcba6348170\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://89dbc048c1483376e11a6754384e9ea154767a88f7dfb3463809a2554bb142b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kuber
netes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d41b04f928f3b4423bf4aa1babfd6292b749752d31ac57771b36bed503312d3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ceb946c94ca247c05c57001754a73122b1bb98a98d886df9d64b2e4a003a4cf6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfbe7b62a4f0a00896ac4608d70c4ef6dc7675e0312f85ca181fcf1087fe73e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a65c3c1af3f1ee07c2f5350011d1f5eb61dfe0b21ff13d7326373d65ffe603c9\\\",\\\"image
\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8fc2e06caaa38408efd9783fa165bede103d30f2b1d556762d633a3ad3bd3436\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8fc2e06caaa38408efd9783fa165bede103d30f2b1d556762d633a3ad3bd3436\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T12:21:02Z\\\",\\\"message\\\":\\\" stale LBs for map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-multus/multus-admission-controller\\\\\\\"}\\\\nI0930 12:21:01.858370 6485 services_controller.go:360] Finished syncing service multus-admission-controller on namespace openshift-multus for network=default : 12.396713ms\\\\nI0930 12:21:01.858856 6485 obj_retry.go:551] Creating *factory.egressNode crc took: 15.122418ms\\\\nI0930 12:21:01.858901 6485 factory.go:1336] Added *v1.Node event handler 7\\\\nI0930 12:21:01.858970 6485 factory.go:1336] Added *v1.EgressIP event handler 8\\\\nI0930 12:21:01.858989 6485 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0930 12:21:01.859004 6485 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0930 12:21:01.859053 6485 handler.go:208] Removed *v1.Node event handler 2\\\\nI0930 12:21:01.859065 6485 factory.go:656] Stopping watch factory\\\\nI0930 12:21:01.859090 6485 handler.go:208] Removed *v1.Node event handler 7\\\\nI0930 12:21:01.859506 6485 factory.go:1336] Added *v1.EgressFirewall event handler 9\\\\nI0930 12:21:01.859639 6485 controller.go:132] Adding controller ef_node_controller event handlers\\\\nI0930 12:21:01.859673 6485 ovnkube.go:599] Stopped ovnkube\\\\nI0930 12:21:01.859702 6485 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF0930 12:21:01.859768 6485 
ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T12:21:01Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-4pvsr_openshift-ovn-kubernetes(7095aa7a-d067-4977-bdc5-3a45a52a6a39)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ce6227a7ac3ced1fbd4814039859e25ff52f314aa8479275f1bfc49b04cf411\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"r
ecursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29ad21e0858ff157cc785f60f949fb08082938850870cc2c5198447f1e1d9253\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://29ad21e0858ff157cc785f60f949fb08082938850870cc2c5198447f1e1d9253\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:50Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4pvsr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:06Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:06 crc kubenswrapper[5002]: I0930 12:21:06.886098 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:06Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:06 crc kubenswrapper[5002]: I0930 12:21:06.901284 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-p45pn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d0334cb8-cf38-4e50-80e8-2b81d8d46ae7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca035599120eed38b0abc1893076e4d1ecab6b1ce53605f53ff30fa2782b63de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2nvm4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:49Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-p45pn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-09-30T12:21:06Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:06 crc kubenswrapper[5002]: I0930 12:21:06.916534 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-2lqq2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7b7df259-4156-484c-bedd-543ca42f2970\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3e10fed923eacb0c5409835ec689ec74b4ef68b770774b3fe03e05db05d63c04\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6865c45580d6dc6acaf6dede9483213728b45f602a6df3a6d9264bbec520db66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6865c45580d6dc6acaf6dede9483213728b45f602a6df3a6d9264bbec520db66\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://042d2758098735539d73ee03bcaf5bfaaa765ef2337975341c
63511bff33e4ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://042d2758098735539d73ee03bcaf5bfaaa765ef2337975341c63511bff33e4ec\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://037e6837e485e2662c487cea7b28bd28b85c3aa2dc698edd3d16b4269907dd83\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://037e6837e485e2662c487cea7b28bd28b85c3aa2dc698edd3d16b4269907dd83\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2a451f8cf7baf508ad6ceda7f19f3117804e5698e678bedf8e4187f60847aaac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2a451f8cf7baf508ad6ceda7f19f3117804e5698e678bedf8e4187f60847aaac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-
copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://edf6916be3680c65376e28fb893a869599eb07d870ac5337a38fed83b4dbf28d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://edf6916be3680c65376e28fb893a869599eb07d870ac5337a38fed83b4dbf28d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7019df241ab90e98201c474287f02919d99bca60c432a8f61ac68d29ce53bed8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7019df241ab90e98201c474287f02919d99bca60c432a8f61ac68d29ce53bed8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-2lqq2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:06Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:06 crc kubenswrapper[5002]: I0930 12:21:06.932406 5002 status_manager.go:875] "Failed to update status for 
pod" pod="openshift-multus/network-metrics-daemon-dj2ln" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"68756e8a-d882-403f-acd7-2c41fce4446f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jdgr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jdgr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:21:04Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-dj2ln\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:06Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:06 crc kubenswrapper[5002]: I0930 12:21:06.953737 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:06 crc kubenswrapper[5002]: I0930 12:21:06.953821 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:06 crc kubenswrapper[5002]: I0930 12:21:06.953845 
5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:06 crc kubenswrapper[5002]: I0930 12:21:06.953876 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:06 crc kubenswrapper[5002]: I0930 12:21:06.953902 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:06Z","lastTransitionTime":"2025-09-30T12:21:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:07 crc kubenswrapper[5002]: I0930 12:21:07.056546 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:07 crc kubenswrapper[5002]: I0930 12:21:07.056584 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:07 crc kubenswrapper[5002]: I0930 12:21:07.056592 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:07 crc kubenswrapper[5002]: I0930 12:21:07.056605 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:07 crc kubenswrapper[5002]: I0930 12:21:07.056615 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:07Z","lastTransitionTime":"2025-09-30T12:21:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:07 crc kubenswrapper[5002]: I0930 12:21:07.159428 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:07 crc kubenswrapper[5002]: I0930 12:21:07.159517 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:07 crc kubenswrapper[5002]: I0930 12:21:07.159530 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:07 crc kubenswrapper[5002]: I0930 12:21:07.159548 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:07 crc kubenswrapper[5002]: I0930 12:21:07.159560 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:07Z","lastTransitionTime":"2025-09-30T12:21:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 12:21:07 crc kubenswrapper[5002]: I0930 12:21:07.262455 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:07 crc kubenswrapper[5002]: I0930 12:21:07.262536 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:07 crc kubenswrapper[5002]: I0930 12:21:07.262553 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:07 crc kubenswrapper[5002]: I0930 12:21:07.262577 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:07 crc kubenswrapper[5002]: I0930 12:21:07.262595 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:07Z","lastTransitionTime":"2025-09-30T12:21:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:07 crc kubenswrapper[5002]: I0930 12:21:07.365256 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:07 crc kubenswrapper[5002]: I0930 12:21:07.365317 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:07 crc kubenswrapper[5002]: I0930 12:21:07.365334 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:07 crc kubenswrapper[5002]: I0930 12:21:07.365353 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:07 crc kubenswrapper[5002]: I0930 12:21:07.365365 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:07Z","lastTransitionTime":"2025-09-30T12:21:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:07 crc kubenswrapper[5002]: I0930 12:21:07.468542 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:07 crc kubenswrapper[5002]: I0930 12:21:07.468603 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:07 crc kubenswrapper[5002]: I0930 12:21:07.468622 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:07 crc kubenswrapper[5002]: I0930 12:21:07.468644 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:07 crc kubenswrapper[5002]: I0930 12:21:07.468662 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:07Z","lastTransitionTime":"2025-09-30T12:21:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 12:21:07 crc kubenswrapper[5002]: I0930 12:21:07.571942 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:07 crc kubenswrapper[5002]: I0930 12:21:07.572010 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:07 crc kubenswrapper[5002]: I0930 12:21:07.572036 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:07 crc kubenswrapper[5002]: I0930 12:21:07.572066 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:07 crc kubenswrapper[5002]: I0930 12:21:07.572087 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:07Z","lastTransitionTime":"2025-09-30T12:21:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:07 crc kubenswrapper[5002]: I0930 12:21:07.675076 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-dj2ln" Sep 30 12:21:07 crc kubenswrapper[5002]: E0930 12:21:07.675309 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-dj2ln" podUID="68756e8a-d882-403f-acd7-2c41fce4446f" Sep 30 12:21:07 crc kubenswrapper[5002]: I0930 12:21:07.675932 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:07 crc kubenswrapper[5002]: I0930 12:21:07.675987 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:07 crc kubenswrapper[5002]: I0930 12:21:07.675995 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:07 crc kubenswrapper[5002]: I0930 12:21:07.676010 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:07 crc kubenswrapper[5002]: I0930 12:21:07.676021 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:07Z","lastTransitionTime":"2025-09-30T12:21:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 12:21:07 crc kubenswrapper[5002]: I0930 12:21:07.779057 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:07 crc kubenswrapper[5002]: I0930 12:21:07.779149 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:07 crc kubenswrapper[5002]: I0930 12:21:07.779176 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:07 crc kubenswrapper[5002]: I0930 12:21:07.779208 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:07 crc kubenswrapper[5002]: I0930 12:21:07.779229 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:07Z","lastTransitionTime":"2025-09-30T12:21:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:07 crc kubenswrapper[5002]: I0930 12:21:07.882651 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:07 crc kubenswrapper[5002]: I0930 12:21:07.882709 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:07 crc kubenswrapper[5002]: I0930 12:21:07.882726 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:07 crc kubenswrapper[5002]: I0930 12:21:07.882750 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:07 crc kubenswrapper[5002]: I0930 12:21:07.882768 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:07Z","lastTransitionTime":"2025-09-30T12:21:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:07 crc kubenswrapper[5002]: I0930 12:21:07.989638 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:07 crc kubenswrapper[5002]: I0930 12:21:07.989706 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:07 crc kubenswrapper[5002]: I0930 12:21:07.989925 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:07 crc kubenswrapper[5002]: I0930 12:21:07.989951 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:07 crc kubenswrapper[5002]: I0930 12:21:07.989968 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:07Z","lastTransitionTime":"2025-09-30T12:21:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Sep 30 12:21:08 crc kubenswrapper[5002]: I0930 12:21:08.093799 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 12:21:08 crc kubenswrapper[5002]: I0930 12:21:08.093875 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 12:21:08 crc kubenswrapper[5002]: I0930 12:21:08.093894 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 12:21:08 crc kubenswrapper[5002]: I0930 12:21:08.093920 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 12:21:08 crc kubenswrapper[5002]: I0930 12:21:08.093938 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:08Z","lastTransitionTime":"2025-09-30T12:21:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 12:21:08 crc kubenswrapper[5002]: I0930 12:21:08.132982 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/68756e8a-d882-403f-acd7-2c41fce4446f-metrics-certs\") pod \"network-metrics-daemon-dj2ln\" (UID: \"68756e8a-d882-403f-acd7-2c41fce4446f\") " pod="openshift-multus/network-metrics-daemon-dj2ln"
Sep 30 12:21:08 crc kubenswrapper[5002]: E0930 12:21:08.133155 5002 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered
Sep 30 12:21:08 crc kubenswrapper[5002]: E0930 12:21:08.133215 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/68756e8a-d882-403f-acd7-2c41fce4446f-metrics-certs podName:68756e8a-d882-403f-acd7-2c41fce4446f nodeName:}" failed. No retries permitted until 2025-09-30 12:21:12.133198735 +0000 UTC m=+46.382880881 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/68756e8a-d882-403f-acd7-2c41fce4446f-metrics-certs") pod "network-metrics-daemon-dj2ln" (UID: "68756e8a-d882-403f-acd7-2c41fce4446f") : object "openshift-multus"/"metrics-daemon-secret" not registered
Sep 30 12:21:08 crc kubenswrapper[5002]: I0930 12:21:08.196843 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 12:21:08 crc kubenswrapper[5002]: I0930 12:21:08.196893 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 12:21:08 crc kubenswrapper[5002]: I0930 12:21:08.196909 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 12:21:08 crc kubenswrapper[5002]: I0930 12:21:08.196930 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 12:21:08 crc kubenswrapper[5002]: I0930 12:21:08.196941 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:08Z","lastTransitionTime":"2025-09-30T12:21:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 12:21:08 crc kubenswrapper[5002]: I0930 12:21:08.299780 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 12:21:08 crc kubenswrapper[5002]: I0930 12:21:08.299831 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 12:21:08 crc kubenswrapper[5002]: I0930 12:21:08.299846 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 12:21:08 crc kubenswrapper[5002]: I0930 12:21:08.299866 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 12:21:08 crc kubenswrapper[5002]: I0930 12:21:08.299878 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:08Z","lastTransitionTime":"2025-09-30T12:21:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 12:21:08 crc kubenswrapper[5002]: I0930 12:21:08.402299 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 12:21:08 crc kubenswrapper[5002]: I0930 12:21:08.402352 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 12:21:08 crc kubenswrapper[5002]: I0930 12:21:08.402371 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 12:21:08 crc kubenswrapper[5002]: I0930 12:21:08.402395 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 12:21:08 crc kubenswrapper[5002]: I0930 12:21:08.402412 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:08Z","lastTransitionTime":"2025-09-30T12:21:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 12:21:08 crc kubenswrapper[5002]: I0930 12:21:08.505135 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 12:21:08 crc kubenswrapper[5002]: I0930 12:21:08.505198 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 12:21:08 crc kubenswrapper[5002]: I0930 12:21:08.505220 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 12:21:08 crc kubenswrapper[5002]: I0930 12:21:08.505249 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 12:21:08 crc kubenswrapper[5002]: I0930 12:21:08.505272 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:08Z","lastTransitionTime":"2025-09-30T12:21:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 12:21:08 crc kubenswrapper[5002]: I0930 12:21:08.607547 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 12:21:08 crc kubenswrapper[5002]: I0930 12:21:08.607609 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 12:21:08 crc kubenswrapper[5002]: I0930 12:21:08.607632 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 12:21:08 crc kubenswrapper[5002]: I0930 12:21:08.607658 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 12:21:08 crc kubenswrapper[5002]: I0930 12:21:08.607675 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:08Z","lastTransitionTime":"2025-09-30T12:21:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 12:21:08 crc kubenswrapper[5002]: I0930 12:21:08.675525 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Sep 30 12:21:08 crc kubenswrapper[5002]: I0930 12:21:08.675635 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 30 12:21:08 crc kubenswrapper[5002]: I0930 12:21:08.675662 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 30 12:21:08 crc kubenswrapper[5002]: E0930 12:21:08.675864 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Sep 30 12:21:08 crc kubenswrapper[5002]: E0930 12:21:08.676002 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Sep 30 12:21:08 crc kubenswrapper[5002]: E0930 12:21:08.676237 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Sep 30 12:21:08 crc kubenswrapper[5002]: I0930 12:21:08.710753 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 12:21:08 crc kubenswrapper[5002]: I0930 12:21:08.710796 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 12:21:08 crc kubenswrapper[5002]: I0930 12:21:08.710809 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 12:21:08 crc kubenswrapper[5002]: I0930 12:21:08.710827 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 12:21:08 crc kubenswrapper[5002]: I0930 12:21:08.710841 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:08Z","lastTransitionTime":"2025-09-30T12:21:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 12:21:08 crc kubenswrapper[5002]: I0930 12:21:08.813865 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 12:21:08 crc kubenswrapper[5002]: I0930 12:21:08.813933 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 12:21:08 crc kubenswrapper[5002]: I0930 12:21:08.813951 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 12:21:08 crc kubenswrapper[5002]: I0930 12:21:08.813976 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 12:21:08 crc kubenswrapper[5002]: I0930 12:21:08.813992 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:08Z","lastTransitionTime":"2025-09-30T12:21:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 12:21:08 crc kubenswrapper[5002]: I0930 12:21:08.916561 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 12:21:08 crc kubenswrapper[5002]: I0930 12:21:08.916629 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 12:21:08 crc kubenswrapper[5002]: I0930 12:21:08.916658 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 12:21:08 crc kubenswrapper[5002]: I0930 12:21:08.916689 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 12:21:08 crc kubenswrapper[5002]: I0930 12:21:08.916708 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:08Z","lastTransitionTime":"2025-09-30T12:21:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 12:21:09 crc kubenswrapper[5002]: I0930 12:21:09.020156 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 12:21:09 crc kubenswrapper[5002]: I0930 12:21:09.020231 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 12:21:09 crc kubenswrapper[5002]: I0930 12:21:09.020251 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 12:21:09 crc kubenswrapper[5002]: I0930 12:21:09.020275 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 12:21:09 crc kubenswrapper[5002]: I0930 12:21:09.020292 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:09Z","lastTransitionTime":"2025-09-30T12:21:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 12:21:09 crc kubenswrapper[5002]: I0930 12:21:09.124157 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 12:21:09 crc kubenswrapper[5002]: I0930 12:21:09.124218 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 12:21:09 crc kubenswrapper[5002]: I0930 12:21:09.124237 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 12:21:09 crc kubenswrapper[5002]: I0930 12:21:09.124287 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 12:21:09 crc kubenswrapper[5002]: I0930 12:21:09.124305 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:09Z","lastTransitionTime":"2025-09-30T12:21:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 12:21:09 crc kubenswrapper[5002]: I0930 12:21:09.227017 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 12:21:09 crc kubenswrapper[5002]: I0930 12:21:09.227088 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 12:21:09 crc kubenswrapper[5002]: I0930 12:21:09.227127 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 12:21:09 crc kubenswrapper[5002]: I0930 12:21:09.227165 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 12:21:09 crc kubenswrapper[5002]: I0930 12:21:09.227188 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:09Z","lastTransitionTime":"2025-09-30T12:21:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 12:21:09 crc kubenswrapper[5002]: I0930 12:21:09.330447 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 12:21:09 crc kubenswrapper[5002]: I0930 12:21:09.330535 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 12:21:09 crc kubenswrapper[5002]: I0930 12:21:09.330556 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 12:21:09 crc kubenswrapper[5002]: I0930 12:21:09.330580 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 12:21:09 crc kubenswrapper[5002]: I0930 12:21:09.330598 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:09Z","lastTransitionTime":"2025-09-30T12:21:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 12:21:09 crc kubenswrapper[5002]: I0930 12:21:09.434101 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 12:21:09 crc kubenswrapper[5002]: I0930 12:21:09.434159 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 12:21:09 crc kubenswrapper[5002]: I0930 12:21:09.434176 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 12:21:09 crc kubenswrapper[5002]: I0930 12:21:09.434200 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 12:21:09 crc kubenswrapper[5002]: I0930 12:21:09.434217 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:09Z","lastTransitionTime":"2025-09-30T12:21:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 12:21:09 crc kubenswrapper[5002]: I0930 12:21:09.537145 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 12:21:09 crc kubenswrapper[5002]: I0930 12:21:09.537208 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 12:21:09 crc kubenswrapper[5002]: I0930 12:21:09.537226 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 12:21:09 crc kubenswrapper[5002]: I0930 12:21:09.537251 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 12:21:09 crc kubenswrapper[5002]: I0930 12:21:09.537270 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:09Z","lastTransitionTime":"2025-09-30T12:21:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 12:21:09 crc kubenswrapper[5002]: I0930 12:21:09.641140 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 12:21:09 crc kubenswrapper[5002]: I0930 12:21:09.641215 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 12:21:09 crc kubenswrapper[5002]: I0930 12:21:09.641233 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 12:21:09 crc kubenswrapper[5002]: I0930 12:21:09.641257 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 12:21:09 crc kubenswrapper[5002]: I0930 12:21:09.641275 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:09Z","lastTransitionTime":"2025-09-30T12:21:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 12:21:09 crc kubenswrapper[5002]: I0930 12:21:09.675699 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-dj2ln"
Sep 30 12:21:09 crc kubenswrapper[5002]: E0930 12:21:09.675880 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-dj2ln" podUID="68756e8a-d882-403f-acd7-2c41fce4446f"
Sep 30 12:21:09 crc kubenswrapper[5002]: I0930 12:21:09.745018 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 12:21:09 crc kubenswrapper[5002]: I0930 12:21:09.745067 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 12:21:09 crc kubenswrapper[5002]: I0930 12:21:09.745085 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 12:21:09 crc kubenswrapper[5002]: I0930 12:21:09.745109 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 12:21:09 crc kubenswrapper[5002]: I0930 12:21:09.745127 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:09Z","lastTransitionTime":"2025-09-30T12:21:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 12:21:09 crc kubenswrapper[5002]: I0930 12:21:09.848220 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 12:21:09 crc kubenswrapper[5002]: I0930 12:21:09.848645 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 12:21:09 crc kubenswrapper[5002]: I0930 12:21:09.848804 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 12:21:09 crc kubenswrapper[5002]: I0930 12:21:09.848954 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 12:21:09 crc kubenswrapper[5002]: I0930 12:21:09.849091 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:09Z","lastTransitionTime":"2025-09-30T12:21:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 12:21:09 crc kubenswrapper[5002]: I0930 12:21:09.952444 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 12:21:09 crc kubenswrapper[5002]: I0930 12:21:09.952577 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 12:21:09 crc kubenswrapper[5002]: I0930 12:21:09.952602 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 12:21:09 crc kubenswrapper[5002]: I0930 12:21:09.952632 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 12:21:09 crc kubenswrapper[5002]: I0930 12:21:09.952653 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:09Z","lastTransitionTime":"2025-09-30T12:21:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 12:21:10 crc kubenswrapper[5002]: I0930 12:21:10.055079 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 12:21:10 crc kubenswrapper[5002]: I0930 12:21:10.055155 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 12:21:10 crc kubenswrapper[5002]: I0930 12:21:10.055178 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 12:21:10 crc kubenswrapper[5002]: I0930 12:21:10.055208 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 12:21:10 crc kubenswrapper[5002]: I0930 12:21:10.055229 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:10Z","lastTransitionTime":"2025-09-30T12:21:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 12:21:10 crc kubenswrapper[5002]: I0930 12:21:10.157846 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 12:21:10 crc kubenswrapper[5002]: I0930 12:21:10.157901 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 12:21:10 crc kubenswrapper[5002]: I0930 12:21:10.157918 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 12:21:10 crc kubenswrapper[5002]: I0930 12:21:10.157942 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 12:21:10 crc kubenswrapper[5002]: I0930 12:21:10.157965 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:10Z","lastTransitionTime":"2025-09-30T12:21:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 12:21:10 crc kubenswrapper[5002]: I0930 12:21:10.260735 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 12:21:10 crc kubenswrapper[5002]: I0930 12:21:10.261119 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 12:21:10 crc kubenswrapper[5002]: I0930 12:21:10.261260 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 12:21:10 crc kubenswrapper[5002]: I0930 12:21:10.261391 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 12:21:10 crc kubenswrapper[5002]: I0930 12:21:10.261603 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:10Z","lastTransitionTime":"2025-09-30T12:21:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 12:21:10 crc kubenswrapper[5002]: I0930 12:21:10.364522 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 12:21:10 crc kubenswrapper[5002]: I0930 12:21:10.364569 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 12:21:10 crc kubenswrapper[5002]: I0930 12:21:10.364580 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 12:21:10 crc kubenswrapper[5002]: I0930 12:21:10.364595 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 12:21:10 crc kubenswrapper[5002]: I0930 12:21:10.364605 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:10Z","lastTransitionTime":"2025-09-30T12:21:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 12:21:10 crc kubenswrapper[5002]: I0930 12:21:10.467500 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 12:21:10 crc kubenswrapper[5002]: I0930 12:21:10.467557 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 12:21:10 crc kubenswrapper[5002]: I0930 12:21:10.467575 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 12:21:10 crc kubenswrapper[5002]: I0930 12:21:10.467598 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 12:21:10 crc kubenswrapper[5002]: I0930 12:21:10.467614 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:10Z","lastTransitionTime":"2025-09-30T12:21:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 12:21:10 crc kubenswrapper[5002]: I0930 12:21:10.571086 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 12:21:10 crc kubenswrapper[5002]: I0930 12:21:10.571133 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 12:21:10 crc kubenswrapper[5002]: I0930 12:21:10.571147 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 12:21:10 crc kubenswrapper[5002]: I0930 12:21:10.571164 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 12:21:10 crc kubenswrapper[5002]: I0930 12:21:10.571174 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:10Z","lastTransitionTime":"2025-09-30T12:21:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 12:21:10 crc kubenswrapper[5002]: I0930 12:21:10.673992 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 12:21:10 crc kubenswrapper[5002]: I0930 12:21:10.674053 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 12:21:10 crc kubenswrapper[5002]: I0930 12:21:10.674071 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 12:21:10 crc kubenswrapper[5002]: I0930 12:21:10.674094 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 12:21:10 crc kubenswrapper[5002]: I0930 12:21:10.674111 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:10Z","lastTransitionTime":"2025-09-30T12:21:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 12:21:10 crc kubenswrapper[5002]: I0930 12:21:10.675216 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 30 12:21:10 crc kubenswrapper[5002]: I0930 12:21:10.675316 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 30 12:21:10 crc kubenswrapper[5002]: I0930 12:21:10.675219 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Sep 30 12:21:10 crc kubenswrapper[5002]: E0930 12:21:10.675514 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Sep 30 12:21:10 crc kubenswrapper[5002]: E0930 12:21:10.675614 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Sep 30 12:21:10 crc kubenswrapper[5002]: E0930 12:21:10.675722 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Sep 30 12:21:10 crc kubenswrapper[5002]: I0930 12:21:10.777153 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 12:21:10 crc kubenswrapper[5002]: I0930 12:21:10.777193 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 12:21:10 crc kubenswrapper[5002]: I0930 12:21:10.777210 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 12:21:10 crc kubenswrapper[5002]: I0930 12:21:10.777231 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 12:21:10 crc kubenswrapper[5002]: I0930 12:21:10.777248 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:10Z","lastTransitionTime":"2025-09-30T12:21:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 12:21:10 crc kubenswrapper[5002]: I0930 12:21:10.880310 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 12:21:10 crc kubenswrapper[5002]: I0930 12:21:10.880372 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 12:21:10 crc kubenswrapper[5002]: I0930 12:21:10.880390 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 12:21:10 crc kubenswrapper[5002]: I0930 12:21:10.880413 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 12:21:10 crc kubenswrapper[5002]: I0930 12:21:10.880430 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:10Z","lastTransitionTime":"2025-09-30T12:21:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 12:21:10 crc kubenswrapper[5002]: I0930 12:21:10.983784 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 12:21:10 crc kubenswrapper[5002]: I0930 12:21:10.983823 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 12:21:10 crc kubenswrapper[5002]: I0930 12:21:10.983832 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 12:21:10 crc kubenswrapper[5002]: I0930 12:21:10.983845 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 12:21:10 crc kubenswrapper[5002]: I0930 12:21:10.983855 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:10Z","lastTransitionTime":"2025-09-30T12:21:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 12:21:11 crc kubenswrapper[5002]: I0930 12:21:11.085907 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 12:21:11 crc kubenswrapper[5002]: I0930 12:21:11.085953 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 12:21:11 crc kubenswrapper[5002]: I0930 12:21:11.085968 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 12:21:11 crc kubenswrapper[5002]: I0930 12:21:11.085988 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 12:21:11 crc kubenswrapper[5002]: I0930 12:21:11.086008 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:11Z","lastTransitionTime":"2025-09-30T12:21:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 12:21:11 crc kubenswrapper[5002]: I0930 12:21:11.189571 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 12:21:11 crc kubenswrapper[5002]: I0930 12:21:11.189637 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 12:21:11 crc kubenswrapper[5002]: I0930 12:21:11.189652 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 12:21:11 crc kubenswrapper[5002]: I0930 12:21:11.189680 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 12:21:11 crc kubenswrapper[5002]: I0930 12:21:11.189702 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:11Z","lastTransitionTime":"2025-09-30T12:21:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 12:21:11 crc kubenswrapper[5002]: I0930 12:21:11.293285 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 12:21:11 crc kubenswrapper[5002]: I0930 12:21:11.293351 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 12:21:11 crc kubenswrapper[5002]: I0930 12:21:11.293364 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 12:21:11 crc kubenswrapper[5002]: I0930 12:21:11.293394 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 12:21:11 crc kubenswrapper[5002]: I0930 12:21:11.293410 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:11Z","lastTransitionTime":"2025-09-30T12:21:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 12:21:11 crc kubenswrapper[5002]: I0930 12:21:11.396342 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 12:21:11 crc kubenswrapper[5002]: I0930 12:21:11.396377 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 12:21:11 crc kubenswrapper[5002]: I0930 12:21:11.396386 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 12:21:11 crc kubenswrapper[5002]: I0930 12:21:11.396400 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 12:21:11 crc kubenswrapper[5002]: I0930 12:21:11.396409 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:11Z","lastTransitionTime":"2025-09-30T12:21:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 12:21:11 crc kubenswrapper[5002]: I0930 12:21:11.500007 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 12:21:11 crc kubenswrapper[5002]: I0930 12:21:11.500111 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 12:21:11 crc kubenswrapper[5002]: I0930 12:21:11.500129 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 12:21:11 crc kubenswrapper[5002]: I0930 12:21:11.500186 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 12:21:11 crc kubenswrapper[5002]: I0930 12:21:11.500206 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:11Z","lastTransitionTime":"2025-09-30T12:21:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 12:21:11 crc kubenswrapper[5002]: I0930 12:21:11.603673 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 12:21:11 crc kubenswrapper[5002]: I0930 12:21:11.603730 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 12:21:11 crc kubenswrapper[5002]: I0930 12:21:11.603738 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 12:21:11 crc kubenswrapper[5002]: I0930 12:21:11.603751 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 12:21:11 crc kubenswrapper[5002]: I0930 12:21:11.603781 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:11Z","lastTransitionTime":"2025-09-30T12:21:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 12:21:11 crc kubenswrapper[5002]: I0930 12:21:11.675699 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-dj2ln"
Sep 30 12:21:11 crc kubenswrapper[5002]: E0930 12:21:11.675911 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-dj2ln" podUID="68756e8a-d882-403f-acd7-2c41fce4446f"
Sep 30 12:21:11 crc kubenswrapper[5002]: I0930 12:21:11.707895 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 12:21:11 crc kubenswrapper[5002]: I0930 12:21:11.708122 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 12:21:11 crc kubenswrapper[5002]: I0930 12:21:11.708181 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 12:21:11 crc kubenswrapper[5002]: I0930 12:21:11.708218 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 12:21:11 crc kubenswrapper[5002]: I0930 12:21:11.708285 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:11Z","lastTransitionTime":"2025-09-30T12:21:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 12:21:11 crc kubenswrapper[5002]: I0930 12:21:11.811518 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 12:21:11 crc kubenswrapper[5002]: I0930 12:21:11.811776 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 12:21:11 crc kubenswrapper[5002]: I0930 12:21:11.811902 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 12:21:11 crc kubenswrapper[5002]: I0930 12:21:11.811986 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 12:21:11 crc kubenswrapper[5002]: I0930 12:21:11.812065 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:11Z","lastTransitionTime":"2025-09-30T12:21:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 12:21:11 crc kubenswrapper[5002]: I0930 12:21:11.914629 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 12:21:11 crc kubenswrapper[5002]: I0930 12:21:11.914953 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 12:21:11 crc kubenswrapper[5002]: I0930 12:21:11.915122 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 12:21:11 crc kubenswrapper[5002]: I0930 12:21:11.915318 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 12:21:11 crc kubenswrapper[5002]: I0930 12:21:11.915826 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:11Z","lastTransitionTime":"2025-09-30T12:21:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 12:21:12 crc kubenswrapper[5002]: I0930 12:21:12.018762 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 12:21:12 crc kubenswrapper[5002]: I0930 12:21:12.018802 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 12:21:12 crc kubenswrapper[5002]: I0930 12:21:12.018814 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 12:21:12 crc kubenswrapper[5002]: I0930 12:21:12.018831 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 12:21:12 crc kubenswrapper[5002]: I0930 12:21:12.018842 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:12Z","lastTransitionTime":"2025-09-30T12:21:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 12:21:12 crc kubenswrapper[5002]: I0930 12:21:12.121544 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 12:21:12 crc kubenswrapper[5002]: I0930 12:21:12.121619 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 12:21:12 crc kubenswrapper[5002]: I0930 12:21:12.121650 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 12:21:12 crc kubenswrapper[5002]: I0930 12:21:12.121678 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 12:21:12 crc kubenswrapper[5002]: I0930 12:21:12.121704 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:12Z","lastTransitionTime":"2025-09-30T12:21:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 12:21:12 crc kubenswrapper[5002]: I0930 12:21:12.178228 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/68756e8a-d882-403f-acd7-2c41fce4446f-metrics-certs\") pod \"network-metrics-daemon-dj2ln\" (UID: \"68756e8a-d882-403f-acd7-2c41fce4446f\") " pod="openshift-multus/network-metrics-daemon-dj2ln"
Sep 30 12:21:12 crc kubenswrapper[5002]: E0930 12:21:12.178400 5002 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered
Sep 30 12:21:12 crc kubenswrapper[5002]: E0930 12:21:12.178531 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/68756e8a-d882-403f-acd7-2c41fce4446f-metrics-certs podName:68756e8a-d882-403f-acd7-2c41fce4446f nodeName:}" failed. No retries permitted until 2025-09-30 12:21:20.178503044 +0000 UTC m=+54.428185240 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/68756e8a-d882-403f-acd7-2c41fce4446f-metrics-certs") pod "network-metrics-daemon-dj2ln" (UID: "68756e8a-d882-403f-acd7-2c41fce4446f") : object "openshift-multus"/"metrics-daemon-secret" not registered
Sep 30 12:21:12 crc kubenswrapper[5002]: I0930 12:21:12.224141 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 12:21:12 crc kubenswrapper[5002]: I0930 12:21:12.224208 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 12:21:12 crc kubenswrapper[5002]: I0930 12:21:12.224231 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 12:21:12 crc kubenswrapper[5002]: I0930 12:21:12.224262 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 12:21:12 crc kubenswrapper[5002]: I0930 12:21:12.224287 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:12Z","lastTransitionTime":"2025-09-30T12:21:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 12:21:12 crc kubenswrapper[5002]: I0930 12:21:12.326546 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 12:21:12 crc kubenswrapper[5002]: I0930 12:21:12.326580 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 12:21:12 crc kubenswrapper[5002]: I0930 12:21:12.326591 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 12:21:12 crc kubenswrapper[5002]: I0930 12:21:12.326605 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 12:21:12 crc kubenswrapper[5002]: I0930 12:21:12.326616 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:12Z","lastTransitionTime":"2025-09-30T12:21:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Has your network provider started?"} Sep 30 12:21:12 crc kubenswrapper[5002]: I0930 12:21:12.429156 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:12 crc kubenswrapper[5002]: I0930 12:21:12.429211 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:12 crc kubenswrapper[5002]: I0930 12:21:12.429225 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:12 crc kubenswrapper[5002]: I0930 12:21:12.429241 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:12 crc kubenswrapper[5002]: I0930 12:21:12.429253 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:12Z","lastTransitionTime":"2025-09-30T12:21:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:12 crc kubenswrapper[5002]: I0930 12:21:12.534664 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:12 crc kubenswrapper[5002]: I0930 12:21:12.534725 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:12 crc kubenswrapper[5002]: I0930 12:21:12.534747 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:12 crc kubenswrapper[5002]: I0930 12:21:12.534776 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:12 crc kubenswrapper[5002]: I0930 12:21:12.534799 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:12Z","lastTransitionTime":"2025-09-30T12:21:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:12 crc kubenswrapper[5002]: I0930 12:21:12.637769 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:12 crc kubenswrapper[5002]: I0930 12:21:12.637811 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:12 crc kubenswrapper[5002]: I0930 12:21:12.637820 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:12 crc kubenswrapper[5002]: I0930 12:21:12.637834 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:12 crc kubenswrapper[5002]: I0930 12:21:12.637842 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:12Z","lastTransitionTime":"2025-09-30T12:21:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 12:21:12 crc kubenswrapper[5002]: I0930 12:21:12.675164 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 12:21:12 crc kubenswrapper[5002]: E0930 12:21:12.675380 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 12:21:12 crc kubenswrapper[5002]: I0930 12:21:12.675781 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 12:21:12 crc kubenswrapper[5002]: I0930 12:21:12.675813 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 12:21:12 crc kubenswrapper[5002]: E0930 12:21:12.676193 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 12:21:12 crc kubenswrapper[5002]: E0930 12:21:12.676396 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 12:21:12 crc kubenswrapper[5002]: I0930 12:21:12.739893 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:12 crc kubenswrapper[5002]: I0930 12:21:12.739920 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:12 crc kubenswrapper[5002]: I0930 12:21:12.739928 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:12 crc kubenswrapper[5002]: I0930 12:21:12.739942 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:12 crc kubenswrapper[5002]: I0930 12:21:12.739952 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:12Z","lastTransitionTime":"2025-09-30T12:21:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 12:21:12 crc kubenswrapper[5002]: I0930 12:21:12.842699 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:12 crc kubenswrapper[5002]: I0930 12:21:12.842750 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:12 crc kubenswrapper[5002]: I0930 12:21:12.842767 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:12 crc kubenswrapper[5002]: I0930 12:21:12.842805 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:12 crc kubenswrapper[5002]: I0930 12:21:12.842821 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:12Z","lastTransitionTime":"2025-09-30T12:21:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:12 crc kubenswrapper[5002]: I0930 12:21:12.949534 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:12 crc kubenswrapper[5002]: I0930 12:21:12.949604 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:12 crc kubenswrapper[5002]: I0930 12:21:12.949628 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:12 crc kubenswrapper[5002]: I0930 12:21:12.949658 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:12 crc kubenswrapper[5002]: I0930 12:21:12.949683 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:12Z","lastTransitionTime":"2025-09-30T12:21:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:13 crc kubenswrapper[5002]: I0930 12:21:13.004070 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:13 crc kubenswrapper[5002]: I0930 12:21:13.004626 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:13 crc kubenswrapper[5002]: I0930 12:21:13.004865 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:13 crc kubenswrapper[5002]: I0930 12:21:13.005133 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:13 crc kubenswrapper[5002]: I0930 12:21:13.005656 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:13Z","lastTransitionTime":"2025-09-30T12:21:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 12:21:13 crc kubenswrapper[5002]: E0930 12:21:13.022522 5002 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T12:21:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:13Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T12:21:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:13Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T12:21:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:13Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T12:21:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:13Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"2268e3e5-9437-4733-80ec-6085372d1c27\\\",\\\"systemUUID\\\":\\\"2730bf9d-d559-45ca-96f1-192133954467\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:13Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:13 crc kubenswrapper[5002]: I0930 12:21:13.026996 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:13 crc kubenswrapper[5002]: I0930 12:21:13.027169 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 30 12:21:13 crc kubenswrapper[5002]: I0930 12:21:13.027248 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:13 crc kubenswrapper[5002]: I0930 12:21:13.027329 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:13 crc kubenswrapper[5002]: I0930 12:21:13.027388 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:13Z","lastTransitionTime":"2025-09-30T12:21:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:13 crc kubenswrapper[5002]: E0930 12:21:13.043162 5002 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T12:21:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:13Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T12:21:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:13Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T12:21:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:13Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T12:21:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:13Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"2268e3e5-9437-4733-80ec-6085372d1c27\\\",\\\"systemUUID\\\":\\\"2730bf9d-d559-45ca-96f1-192133954467\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:13Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:13 crc kubenswrapper[5002]: I0930 12:21:13.048931 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:13 crc kubenswrapper[5002]: I0930 12:21:13.049001 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
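[annotation] Every "Error updating node status" entry in this capture fails for the same reason: the node.network-node-identity.openshift.io webhook at 127.0.0.1:9743 serves a TLS certificate that expired on 2025-08-24T17:21:41Z while the node clock reads 2025-09-30. A minimal Go sketch to confirm the expiry from the node itself (the address is taken from the log; InsecureSkipVerify is deliberate here, since certificate verification is exactly what is failing):

package main

import (
	"crypto/tls"
	"fmt"
	"time"
)

// Fetches the webhook's serving certificate and prints its validity
// window. Run on the node; 127.0.0.1:9743 comes from the log above.
func main() {
	conn, err := tls.Dial("tcp", "127.0.0.1:9743", &tls.Config{InsecureSkipVerify: true})
	if err != nil {
		fmt.Println("dial:", err)
		return
	}
	defer conn.Close()
	cert := conn.ConnectionState().PeerCertificates[0]
	fmt.Println("NotBefore:", cert.NotBefore)
	fmt.Println("NotAfter: ", cert.NotAfter)
	if time.Now().After(cert.NotAfter) {
		fmt.Println("expired: matches the kubelet x509 error")
	}
}

Until that serving certificate is rotated, the kubelet cannot patch node status and keeps re-recording the same NotReady events.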
event="NodeHasNoDiskPressure" Sep 30 12:21:13 crc kubenswrapper[5002]: I0930 12:21:13.049021 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:13 crc kubenswrapper[5002]: I0930 12:21:13.049048 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:13 crc kubenswrapper[5002]: I0930 12:21:13.049068 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:13Z","lastTransitionTime":"2025-09-30T12:21:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:13 crc kubenswrapper[5002]: E0930 12:21:13.063633 5002 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T12:21:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:13Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T12:21:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:13Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T12:21:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:13Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T12:21:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:13Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"2268e3e5-9437-4733-80ec-6085372d1c27\\\",\\\"systemUUID\\\":\\\"2730bf9d-d559-45ca-96f1-192133954467\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:13Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:13 crc kubenswrapper[5002]: I0930 12:21:13.068054 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:13 crc kubenswrapper[5002]: I0930 12:21:13.068130 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
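[annotation] The patch attempts at 12:21:13.022522, .043162, .063633 and .084149 form one retry burst: the kubelet retries the node-status PATCH a small fixed number of times per sync (five in recent Kubernetes releases, an assumed count here) and then waits for the next sync tick, which is why the identical block recurs. A short Go sketch for tallying these failures across a full kubelet.log, where each record is a single line:

package main

import (
	"bufio"
	"fmt"
	"os"
	"strings"
)

// Counts failed node-status patches in a kubelet log and how many
// were caused by the expired webhook certificate seen above.
func main() {
	if len(os.Args) < 2 {
		fmt.Println("usage: tally <kubelet.log>")
		return
	}
	f, err := os.Open(os.Args[1])
	if err != nil {
		fmt.Println(err)
		return
	}
	defer f.Close()
	sc := bufio.NewScanner(f)
	sc.Buffer(make([]byte, 0, 1024*1024), 16*1024*1024) // status-patch records are huge
	attempts, expired := 0, 0
	for sc.Scan() {
		if line := sc.Text(); strings.Contains(line, "Error updating node status, will retry") {
			attempts++
			if strings.Contains(line, "certificate has expired") {
				expired++
			}
		}
	}
	if err := sc.Err(); err != nil {
		fmt.Println("scan:", err)
		return
	}
	fmt.Printf("%d failed status patches, %d due to an expired certificate\n", attempts, expired)
}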
event="NodeHasNoDiskPressure" Sep 30 12:21:13 crc kubenswrapper[5002]: I0930 12:21:13.068156 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:13 crc kubenswrapper[5002]: I0930 12:21:13.068187 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:13 crc kubenswrapper[5002]: I0930 12:21:13.068209 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:13Z","lastTransitionTime":"2025-09-30T12:21:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:13 crc kubenswrapper[5002]: E0930 12:21:13.084149 5002 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T12:21:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:13Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T12:21:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:13Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T12:21:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:13Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T12:21:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:13Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"2268e3e5-9437-4733-80ec-6085372d1c27\\\",\\\"systemUUID\\\":\\\"2730bf9d-d559-45ca-96f1-192133954467\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:13Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:13 crc kubenswrapper[5002]: I0930 12:21:13.088863 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:13 crc kubenswrapper[5002]: I0930 12:21:13.088942 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 30 12:21:13 crc kubenswrapper[5002]: I0930 12:21:13.088961 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:13 crc kubenswrapper[5002]: I0930 12:21:13.088983 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:13 crc kubenswrapper[5002]: I0930 12:21:13.089002 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:13Z","lastTransitionTime":"2025-09-30T12:21:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:13 crc kubenswrapper[5002]: E0930 12:21:13.103396 5002 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T12:21:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:13Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T12:21:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:13Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T12:21:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:13Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T12:21:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:13Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"2268e3e5-9437-4733-80ec-6085372d1c27\\\",\\\"systemUUID\\\":\\\"2730bf9d-d559-45ca-96f1-192133954467\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:13Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:13 crc kubenswrapper[5002]: E0930 12:21:13.103580 5002 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Sep 30 12:21:13 crc kubenswrapper[5002]: I0930 12:21:13.105420 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Sep 30 12:21:13 crc kubenswrapper[5002]: I0930 12:21:13.105457 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:13 crc kubenswrapper[5002]: I0930 12:21:13.105492 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:13 crc kubenswrapper[5002]: I0930 12:21:13.105511 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:13 crc kubenswrapper[5002]: I0930 12:21:13.105524 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:13Z","lastTransitionTime":"2025-09-30T12:21:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:13 crc kubenswrapper[5002]: I0930 12:21:13.208413 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:13 crc kubenswrapper[5002]: I0930 12:21:13.208527 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:13 crc kubenswrapper[5002]: I0930 12:21:13.208549 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:13 crc kubenswrapper[5002]: I0930 12:21:13.208617 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:13 crc kubenswrapper[5002]: I0930 12:21:13.208639 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:13Z","lastTransitionTime":"2025-09-30T12:21:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:13 crc kubenswrapper[5002]: I0930 12:21:13.311462 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:13 crc kubenswrapper[5002]: I0930 12:21:13.311551 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:13 crc kubenswrapper[5002]: I0930 12:21:13.311569 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:13 crc kubenswrapper[5002]: I0930 12:21:13.311595 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:13 crc kubenswrapper[5002]: I0930 12:21:13.311614 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:13Z","lastTransitionTime":"2025-09-30T12:21:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 12:21:13 crc kubenswrapper[5002]: I0930 12:21:13.414909 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:13 crc kubenswrapper[5002]: I0930 12:21:13.414977 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:13 crc kubenswrapper[5002]: I0930 12:21:13.415000 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:13 crc kubenswrapper[5002]: I0930 12:21:13.415034 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:13 crc kubenswrapper[5002]: I0930 12:21:13.415058 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:13Z","lastTransitionTime":"2025-09-30T12:21:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:13 crc kubenswrapper[5002]: I0930 12:21:13.518062 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:13 crc kubenswrapper[5002]: I0930 12:21:13.518101 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:13 crc kubenswrapper[5002]: I0930 12:21:13.518112 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:13 crc kubenswrapper[5002]: I0930 12:21:13.518126 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:13 crc kubenswrapper[5002]: I0930 12:21:13.518136 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:13Z","lastTransitionTime":"2025-09-30T12:21:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:13 crc kubenswrapper[5002]: I0930 12:21:13.621451 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:13 crc kubenswrapper[5002]: I0930 12:21:13.621773 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:13 crc kubenswrapper[5002]: I0930 12:21:13.622019 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:13 crc kubenswrapper[5002]: I0930 12:21:13.622260 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:13 crc kubenswrapper[5002]: I0930 12:21:13.622523 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:13Z","lastTransitionTime":"2025-09-30T12:21:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 12:21:13 crc kubenswrapper[5002]: I0930 12:21:13.675140 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-dj2ln" Sep 30 12:21:13 crc kubenswrapper[5002]: E0930 12:21:13.675596 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-dj2ln" podUID="68756e8a-d882-403f-acd7-2c41fce4446f" Sep 30 12:21:13 crc kubenswrapper[5002]: I0930 12:21:13.725335 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:13 crc kubenswrapper[5002]: I0930 12:21:13.725728 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:13 crc kubenswrapper[5002]: I0930 12:21:13.725868 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:13 crc kubenswrapper[5002]: I0930 12:21:13.726112 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:13 crc kubenswrapper[5002]: I0930 12:21:13.726287 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:13Z","lastTransitionTime":"2025-09-30T12:21:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:13 crc kubenswrapper[5002]: I0930 12:21:13.824581 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Sep 30 12:21:13 crc kubenswrapper[5002]: I0930 12:21:13.831253 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:13 crc kubenswrapper[5002]: I0930 12:21:13.831518 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:13 crc kubenswrapper[5002]: I0930 12:21:13.831894 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:13 crc kubenswrapper[5002]: I0930 12:21:13.832508 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:13 crc kubenswrapper[5002]: I0930 12:21:13.832892 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:13Z","lastTransitionTime":"2025-09-30T12:21:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 12:21:13 crc kubenswrapper[5002]: I0930 12:21:13.835024 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler/openshift-kube-scheduler-crc"] Sep 30 12:21:13 crc kubenswrapper[5002]: I0930 12:21:13.843195 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hvvbg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c3b6a7b0-1c44-43ee-a789-36bb0cd51b87\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cb569d449d2d8d49ee8d62c4815b625481cd5ddd882a135dac7465fe63fbc67e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:21:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ffrqq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4537e7bf982dba689bd81821609884ac5f2530e6efa7d48cd65e05ff64d2ec6b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:21:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ffrqq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\
"2025-09-30T12:21:02Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-hvvbg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:13Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:13 crc kubenswrapper[5002]: I0930 12:21:13.878804 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1d4eefd-8292-45a1-af4b-4a4906cccd2f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21a70de7ac636795f52b3bdd2a2b938494cabcf80e1fb9b321a309f4719edf4e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://240fb1d0e11ae7b3459121ba32094ddfa79dc71a1180befebcaf594859a63d86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://91de7b5c3b52d03b05c151b66857b5af5f3b9d228f3f1af32a7f504225e1e739\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be
30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4cb1aca4e313653c2d83c0d4fb8dfab1e6f8c8220c806cbcdcf154f2d9ab6e29\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ef59c1ec617a61effd91da12779f13866a50fe611d078330f0275e8d6ef1c27e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3a814062d9554255b768b26ad452946c64836707ac0e68c9e3f107dc587c0ccb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3a814062d9554255b768b26ad452946c64836707ac0e68c9e3f107dc587c0ccb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://74db2c969d2f9049fd8e8130a9fd50b312f3fce049ab0e814390dbff3b0cb596\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-d
ev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://74db2c969d2f9049fd8e8130a9fd50b312f3fce049ab0e814390dbff3b0cb596\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://a367821c9b4b8799fb789ca39d487c519925da11ebb80e94a0123e02cf78964e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a367821c9b4b8799fb789ca39d487c519925da11ebb80e94a0123e02cf78964e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:26Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:13Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:13 crc kubenswrapper[5002]: I0930 12:21:13.901795 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1fe2fccc-790d-41b7-a322-0f99dbd9b3e2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a46e01897629261ad42fc3fa2cc7fdf56a2a020864a1088d9e58edf61aac296e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://017ebbf00a59d33f36e3515dd280bc0c4363da8b3e621339185ebdb3c8728148\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ee0dc371a43f3780e1178e29ac491c7a61786fccb37527dd2f21d800adbbdae\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ba33674850f0b121ea405e37924fcc085b3a3625e87432d9df38b2420920bf45\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://49c139b2c0ae558d108d47a8452c2a8348fc164761d468e1b9b98ff2c7407f20\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"message\\\":\\\"le observer\\\\nW0930 12:20:46.369838 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0930 12:20:46.370037 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 12:20:46.371113 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1995022772/tls.crt::/tmp/serving-cert-1995022772/tls.key\\\\\\\"\\\\nI0930 12:20:46.549991 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0930 12:20:46.558658 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0930 12:20:46.558692 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0930 12:20:46.558723 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0930 12:20:46.558735 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0930 12:20:46.572632 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0930 12:20:46.572689 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 12:20:46.572707 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 12:20:46.572718 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0930 12:20:46.572725 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0930 12:20:46.572737 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0930 12:20:46.572744 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0930 12:20:46.573045 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0930 12:20:46.574462 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:40Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:21:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b99e73bb34b117cdb12e2475fc8e7d0ffdcb7fc44a000282e7776473eca209ea\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://24b1bc36a80f3925846e76f49c3347f1e2f14555096b3a514b06c6eb2e8fe02c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://24b1bc36a80f3925846e76f49c3347f1e2f14555096b3a514b06c6eb2e8fe02c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:13Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:13 crc kubenswrapper[5002]: I0930 12:21:13.924008 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://28d9568103a5257388af7a54e676246ab6ee732abd4516bd06e2cd7471f0ec08\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:13Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:13 crc kubenswrapper[5002]: I0930 12:21:13.935930 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:13 crc kubenswrapper[5002]: I0930 12:21:13.935976 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:13 crc kubenswrapper[5002]: I0930 12:21:13.935993 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:13 crc kubenswrapper[5002]: I0930 12:21:13.936014 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:13 crc kubenswrapper[5002]: I0930 12:21:13.936034 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:13Z","lastTransitionTime":"2025-09-30T12:21:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 12:21:13 crc kubenswrapper[5002]: I0930 12:21:13.939778 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97f2c4d225c4e4f2f04337915618c52ced7280fe45dc735fd9b1aca03f95f7f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:13Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:13 crc kubenswrapper[5002]: I0930 12:21:13.958959 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:13Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:13 crc kubenswrapper[5002]: I0930 12:21:13.977107 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"90d5183f-90e1-421f-bd64-fd7f05605586\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d2e0f074c0f31a5432cea24faa4f95c276f59cdf48bca8ceecd065b4cdff5d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://902cc950dc462d86c9de62cac5d04e99aa3240952f43e8a5f07884e70ff84b56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://047c48f1e175cddc7dc981a8d9027783f4a90c21cb444050771de50fb02e7a9e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f45ac789dc45e907d68fe54ec7bc2ea72dbcc5f46cd55c113c014203d5e709d9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:13Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:13 crc kubenswrapper[5002]: I0930 12:21:13.992945 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-ls6n9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2ebe21b2-eb85-4b30-b911-78f6619d07f9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e83e79da4b7b8f89a61048c7583c9ff7ccb7ac910744dfbb572c189c5676741\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-frsmm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase
\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:52Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-ls6n9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:13Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:14 crc kubenswrapper[5002]: I0930 12:21:14.012770 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://30f55f9a0bcab13420179c62f63cd5e785afe964ac86286450fcd91d7ca0562e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://306dd7840789c5fe2565d819a744a57e330cb1fcc1ab1ca4b3c5ebcfd0361d3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling 
webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:14Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:14 crc kubenswrapper[5002]: I0930 12:21:14.032338 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:14Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:14 crc kubenswrapper[5002]: I0930 12:21:14.037994 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:14 crc kubenswrapper[5002]: I0930 12:21:14.038032 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:14 crc kubenswrapper[5002]: I0930 12:21:14.038041 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:14 crc kubenswrapper[5002]: I0930 12:21:14.038054 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:14 crc kubenswrapper[5002]: I0930 12:21:14.038063 5002 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:14Z","lastTransitionTime":"2025-09-30T12:21:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:14 crc kubenswrapper[5002]: I0930 12:21:14.052325 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-ttfn8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2bd9f18d-bfb3-4bd7-9a87-242029cd3200\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0809a587f4b73c4a5e632e8db2cc0d4a957eded6da9a771cf0b53e4862de54e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube
rnetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lvf2n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-ttfn8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:14Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:14 crc kubenswrapper[5002]: I0930 12:21:14.067811 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"341a55c6-78d3-4fa2-8f47-b56fd41fa1c1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7bc40360d8d17f580c7ded1e33762aa443998e4202892e2f2effc08b6659a4a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbwzb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6868404a7219f8dbbc4e7541e5f80402c169808daea81b5f94546472cb8e70a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:
50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbwzb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:50Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-ncbb5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:14Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:14 crc kubenswrapper[5002]: I0930 12:21:14.096575 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4pvsr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7095aa7a-d067-4977-bdc5-3a45a52a6a39\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b95267743f525eb5c8712304a1dc7afe4403b2065c0a3b594d95dcba6348170\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://89dbc048c1483376e11a6754384e9ea154767a88f7dfb3463809a2554bb142b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d41b04f928f3b4423bf4aa1babfd6292b749752d31ac57771b36bed503312d3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ceb946c94ca247c05c57001754a73122b1bb98a98d886df9d64b2e4a003a4cf6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfbe7b62a4f0a00896ac4608d70c4ef6dc7675e0312f85ca181fcf1087fe73e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a65c3c1af3f1ee07c2f5350011d1f5eb61dfe0b21ff13d7326373d65ffe603c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8fc2e06caaa38408efd9783fa165bede103d30f2
b1d556762d633a3ad3bd3436\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8fc2e06caaa38408efd9783fa165bede103d30f2b1d556762d633a3ad3bd3436\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T12:21:02Z\\\",\\\"message\\\":\\\" stale LBs for map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-multus/multus-admission-controller\\\\\\\"}\\\\nI0930 12:21:01.858370 6485 services_controller.go:360] Finished syncing service multus-admission-controller on namespace openshift-multus for network=default : 12.396713ms\\\\nI0930 12:21:01.858856 6485 obj_retry.go:551] Creating *factory.egressNode crc took: 15.122418ms\\\\nI0930 12:21:01.858901 6485 factory.go:1336] Added *v1.Node event handler 7\\\\nI0930 12:21:01.858970 6485 factory.go:1336] Added *v1.EgressIP event handler 8\\\\nI0930 12:21:01.858989 6485 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0930 12:21:01.859004 6485 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0930 12:21:01.859053 6485 handler.go:208] Removed *v1.Node event handler 2\\\\nI0930 12:21:01.859065 6485 factory.go:656] Stopping watch factory\\\\nI0930 12:21:01.859090 6485 handler.go:208] Removed *v1.Node event handler 7\\\\nI0930 12:21:01.859506 6485 factory.go:1336] Added *v1.EgressFirewall event handler 9\\\\nI0930 12:21:01.859639 6485 controller.go:132] Adding controller ef_node_controller event handlers\\\\nI0930 12:21:01.859673 6485 ovnkube.go:599] Stopped ovnkube\\\\nI0930 12:21:01.859702 6485 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF0930 12:21:01.859768 6485 ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T12:21:01Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-4pvsr_openshift-ovn-kubernetes(7095aa7a-d067-4977-bdc5-3a45a52a6a39)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ce6227a7ac3ced1fbd4814039859e25ff52f314aa8479275f1bfc49b04cf411\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29ad21e0858ff157cc785f60f949fb08082938850870cc2c5198447f1e1d9253\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://29ad21e0858ff157cc785f60f949fb08082938850870cc2c5198447f1e1d9253\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:50Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4pvsr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:14Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:14 crc kubenswrapper[5002]: I0930 12:21:14.109192 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-dj2ln" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"68756e8a-d882-403f-acd7-2c41fce4446f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jdgr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jdgr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:21:04Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-dj2ln\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:14Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:14 crc kubenswrapper[5002]: I0930 12:21:14.122993 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:14Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:14 crc kubenswrapper[5002]: I0930 12:21:14.135880 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-p45pn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d0334cb8-cf38-4e50-80e8-2b81d8d46ae7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca035599120eed38b0abc1893076e4d1ecab6b1ce53605f53ff30fa2782b63de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2nvm4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:49Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-p45pn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-09-30T12:21:14Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:14 crc kubenswrapper[5002]: I0930 12:21:14.140731 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:14 crc kubenswrapper[5002]: I0930 12:21:14.140780 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:14 crc kubenswrapper[5002]: I0930 12:21:14.140793 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:14 crc kubenswrapper[5002]: I0930 12:21:14.140811 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:14 crc kubenswrapper[5002]: I0930 12:21:14.140823 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:14Z","lastTransitionTime":"2025-09-30T12:21:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:14 crc kubenswrapper[5002]: I0930 12:21:14.159146 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-2lqq2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7b7df259-4156-484c-bedd-543ca42f2970\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3e10fed923eacb0c5409835ec689ec74b4ef68b770774b3fe03e05db05d63c04\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6865c45580d6dc6acaf6dede9483213728b45f602a6df3a6d9264bbec520db66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f
8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6865c45580d6dc6acaf6dede9483213728b45f602a6df3a6d9264bbec520db66\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://042d2758098735539d73ee03bcaf5bfaaa765ef2337975341c63511bff33e4ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://042d2758098735539d73ee03bcaf5bfaaa765ef2337975341c63511bff33e4ec\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://037e6837e485e2662c487cea7b28bd28b85c3aa2dc698edd3d16b4269907dd83\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://037e6837e485e2662c487cea7b28bd28b85c3aa2dc698edd3d16b4269907dd83\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/e
tc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2a451f8cf7baf508ad6ceda7f19f3117804e5698e678bedf8e4187f60847aaac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2a451f8cf7baf508ad6ceda7f19f3117804e5698e678bedf8e4187f60847aaac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://edf6916be3680c65376e28fb893a869599eb07d870ac5337a38fed83b4dbf28d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://edf6916be3680c65376e28fb893a869599eb07d870ac5337a38fed83b4dbf28d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7019df241ab90e98201c474287f02919d99bca60c432a8f61ac68d29ce53bed8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7019df241ab90e98201c474287f02919d99bca60c432a8f61ac68d29ce53bed8\\\",\\\"exitC
ode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-2lqq2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:14Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:14 crc kubenswrapper[5002]: I0930 12:21:14.242400 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:14 crc kubenswrapper[5002]: I0930 12:21:14.242608 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:14 crc kubenswrapper[5002]: I0930 12:21:14.242666 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:14 crc kubenswrapper[5002]: I0930 12:21:14.242723 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:14 crc kubenswrapper[5002]: I0930 12:21:14.242810 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:14Z","lastTransitionTime":"2025-09-30T12:21:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:14 crc kubenswrapper[5002]: I0930 12:21:14.346033 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:14 crc kubenswrapper[5002]: I0930 12:21:14.346064 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:14 crc kubenswrapper[5002]: I0930 12:21:14.346072 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:14 crc kubenswrapper[5002]: I0930 12:21:14.346084 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:14 crc kubenswrapper[5002]: I0930 12:21:14.346094 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:14Z","lastTransitionTime":"2025-09-30T12:21:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 12:21:14 crc kubenswrapper[5002]: I0930 12:21:14.449401 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:14 crc kubenswrapper[5002]: I0930 12:21:14.449745 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:14 crc kubenswrapper[5002]: I0930 12:21:14.449837 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:14 crc kubenswrapper[5002]: I0930 12:21:14.449975 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:14 crc kubenswrapper[5002]: I0930 12:21:14.450084 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:14Z","lastTransitionTime":"2025-09-30T12:21:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:14 crc kubenswrapper[5002]: I0930 12:21:14.553653 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:14 crc kubenswrapper[5002]: I0930 12:21:14.554566 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:14 crc kubenswrapper[5002]: I0930 12:21:14.554715 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:14 crc kubenswrapper[5002]: I0930 12:21:14.554882 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:14 crc kubenswrapper[5002]: I0930 12:21:14.555020 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:14Z","lastTransitionTime":"2025-09-30T12:21:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:14 crc kubenswrapper[5002]: I0930 12:21:14.658696 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:14 crc kubenswrapper[5002]: I0930 12:21:14.658751 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:14 crc kubenswrapper[5002]: I0930 12:21:14.658769 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:14 crc kubenswrapper[5002]: I0930 12:21:14.658802 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:14 crc kubenswrapper[5002]: I0930 12:21:14.658835 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:14Z","lastTransitionTime":"2025-09-30T12:21:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 12:21:14 crc kubenswrapper[5002]: I0930 12:21:14.675754 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 12:21:14 crc kubenswrapper[5002]: I0930 12:21:14.675798 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 12:21:14 crc kubenswrapper[5002]: I0930 12:21:14.675852 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 12:21:14 crc kubenswrapper[5002]: E0930 12:21:14.675968 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 12:21:14 crc kubenswrapper[5002]: E0930 12:21:14.676166 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 12:21:14 crc kubenswrapper[5002]: E0930 12:21:14.676337 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 12:21:14 crc kubenswrapper[5002]: I0930 12:21:14.762159 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:14 crc kubenswrapper[5002]: I0930 12:21:14.762195 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:14 crc kubenswrapper[5002]: I0930 12:21:14.762205 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:14 crc kubenswrapper[5002]: I0930 12:21:14.762220 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:14 crc kubenswrapper[5002]: I0930 12:21:14.762233 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:14Z","lastTransitionTime":"2025-09-30T12:21:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 12:21:14 crc kubenswrapper[5002]: I0930 12:21:14.864735 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:14 crc kubenswrapper[5002]: I0930 12:21:14.864776 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:14 crc kubenswrapper[5002]: I0930 12:21:14.864795 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:14 crc kubenswrapper[5002]: I0930 12:21:14.864810 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:14 crc kubenswrapper[5002]: I0930 12:21:14.864821 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:14Z","lastTransitionTime":"2025-09-30T12:21:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:14 crc kubenswrapper[5002]: I0930 12:21:14.967184 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:14 crc kubenswrapper[5002]: I0930 12:21:14.967239 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:14 crc kubenswrapper[5002]: I0930 12:21:14.967261 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:14 crc kubenswrapper[5002]: I0930 12:21:14.967288 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:14 crc kubenswrapper[5002]: I0930 12:21:14.967317 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:14Z","lastTransitionTime":"2025-09-30T12:21:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:15 crc kubenswrapper[5002]: I0930 12:21:15.069821 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:15 crc kubenswrapper[5002]: I0930 12:21:15.070076 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:15 crc kubenswrapper[5002]: I0930 12:21:15.070141 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:15 crc kubenswrapper[5002]: I0930 12:21:15.070209 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:15 crc kubenswrapper[5002]: I0930 12:21:15.070292 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:15Z","lastTransitionTime":"2025-09-30T12:21:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 12:21:15 crc kubenswrapper[5002]: I0930 12:21:15.173765 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:15 crc kubenswrapper[5002]: I0930 12:21:15.173807 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:15 crc kubenswrapper[5002]: I0930 12:21:15.173816 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:15 crc kubenswrapper[5002]: I0930 12:21:15.173829 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:15 crc kubenswrapper[5002]: I0930 12:21:15.173838 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:15Z","lastTransitionTime":"2025-09-30T12:21:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:15 crc kubenswrapper[5002]: I0930 12:21:15.277097 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:15 crc kubenswrapper[5002]: I0930 12:21:15.277144 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:15 crc kubenswrapper[5002]: I0930 12:21:15.277157 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:15 crc kubenswrapper[5002]: I0930 12:21:15.277177 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:15 crc kubenswrapper[5002]: I0930 12:21:15.277191 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:15Z","lastTransitionTime":"2025-09-30T12:21:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:15 crc kubenswrapper[5002]: I0930 12:21:15.379812 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:15 crc kubenswrapper[5002]: I0930 12:21:15.379901 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:15 crc kubenswrapper[5002]: I0930 12:21:15.379917 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:15 crc kubenswrapper[5002]: I0930 12:21:15.379941 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:15 crc kubenswrapper[5002]: I0930 12:21:15.379957 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:15Z","lastTransitionTime":"2025-09-30T12:21:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 12:21:15 crc kubenswrapper[5002]: I0930 12:21:15.482601 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:15 crc kubenswrapper[5002]: I0930 12:21:15.482648 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:15 crc kubenswrapper[5002]: I0930 12:21:15.482660 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:15 crc kubenswrapper[5002]: I0930 12:21:15.482677 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:15 crc kubenswrapper[5002]: I0930 12:21:15.482689 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:15Z","lastTransitionTime":"2025-09-30T12:21:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:15 crc kubenswrapper[5002]: I0930 12:21:15.586401 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:15 crc kubenswrapper[5002]: I0930 12:21:15.586512 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:15 crc kubenswrapper[5002]: I0930 12:21:15.586539 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:15 crc kubenswrapper[5002]: I0930 12:21:15.586569 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:15 crc kubenswrapper[5002]: I0930 12:21:15.586595 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:15Z","lastTransitionTime":"2025-09-30T12:21:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:15 crc kubenswrapper[5002]: I0930 12:21:15.675263 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-dj2ln" Sep 30 12:21:15 crc kubenswrapper[5002]: E0930 12:21:15.675540 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-dj2ln" podUID="68756e8a-d882-403f-acd7-2c41fce4446f" Sep 30 12:21:15 crc kubenswrapper[5002]: I0930 12:21:15.689579 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:15 crc kubenswrapper[5002]: I0930 12:21:15.689641 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:15 crc kubenswrapper[5002]: I0930 12:21:15.689659 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:15 crc kubenswrapper[5002]: I0930 12:21:15.689685 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:15 crc kubenswrapper[5002]: I0930 12:21:15.689705 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:15Z","lastTransitionTime":"2025-09-30T12:21:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:15 crc kubenswrapper[5002]: I0930 12:21:15.792567 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:15 crc kubenswrapper[5002]: I0930 12:21:15.792608 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:15 crc kubenswrapper[5002]: I0930 12:21:15.792618 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:15 crc kubenswrapper[5002]: I0930 12:21:15.792637 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:15 crc kubenswrapper[5002]: I0930 12:21:15.792649 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:15Z","lastTransitionTime":"2025-09-30T12:21:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 12:21:15 crc kubenswrapper[5002]: I0930 12:21:15.894734 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:15 crc kubenswrapper[5002]: I0930 12:21:15.894781 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:15 crc kubenswrapper[5002]: I0930 12:21:15.894791 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:15 crc kubenswrapper[5002]: I0930 12:21:15.894809 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:15 crc kubenswrapper[5002]: I0930 12:21:15.894821 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:15Z","lastTransitionTime":"2025-09-30T12:21:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:15 crc kubenswrapper[5002]: I0930 12:21:15.997908 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:15 crc kubenswrapper[5002]: I0930 12:21:15.997979 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:15 crc kubenswrapper[5002]: I0930 12:21:15.998003 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:15 crc kubenswrapper[5002]: I0930 12:21:15.998030 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:15 crc kubenswrapper[5002]: I0930 12:21:15.998051 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:15Z","lastTransitionTime":"2025-09-30T12:21:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:16 crc kubenswrapper[5002]: I0930 12:21:16.100846 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:16 crc kubenswrapper[5002]: I0930 12:21:16.100911 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:16 crc kubenswrapper[5002]: I0930 12:21:16.100934 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:16 crc kubenswrapper[5002]: I0930 12:21:16.100965 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:16 crc kubenswrapper[5002]: I0930 12:21:16.100989 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:16Z","lastTransitionTime":"2025-09-30T12:21:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 12:21:16 crc kubenswrapper[5002]: I0930 12:21:16.204252 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:16 crc kubenswrapper[5002]: I0930 12:21:16.204335 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:16 crc kubenswrapper[5002]: I0930 12:21:16.204360 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:16 crc kubenswrapper[5002]: I0930 12:21:16.204392 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:16 crc kubenswrapper[5002]: I0930 12:21:16.204414 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:16Z","lastTransitionTime":"2025-09-30T12:21:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:16 crc kubenswrapper[5002]: I0930 12:21:16.308220 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:16 crc kubenswrapper[5002]: I0930 12:21:16.308303 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:16 crc kubenswrapper[5002]: I0930 12:21:16.308327 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:16 crc kubenswrapper[5002]: I0930 12:21:16.308357 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:16 crc kubenswrapper[5002]: I0930 12:21:16.308378 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:16Z","lastTransitionTime":"2025-09-30T12:21:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:16 crc kubenswrapper[5002]: I0930 12:21:16.411707 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:16 crc kubenswrapper[5002]: I0930 12:21:16.412028 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:16 crc kubenswrapper[5002]: I0930 12:21:16.412174 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:16 crc kubenswrapper[5002]: I0930 12:21:16.412307 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:16 crc kubenswrapper[5002]: I0930 12:21:16.412435 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:16Z","lastTransitionTime":"2025-09-30T12:21:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 12:21:16 crc kubenswrapper[5002]: I0930 12:21:16.515181 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:16 crc kubenswrapper[5002]: I0930 12:21:16.515576 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:16 crc kubenswrapper[5002]: I0930 12:21:16.515785 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:16 crc kubenswrapper[5002]: I0930 12:21:16.515995 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:16 crc kubenswrapper[5002]: I0930 12:21:16.516222 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:16Z","lastTransitionTime":"2025-09-30T12:21:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:16 crc kubenswrapper[5002]: I0930 12:21:16.619754 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:16 crc kubenswrapper[5002]: I0930 12:21:16.619820 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:16 crc kubenswrapper[5002]: I0930 12:21:16.619836 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:16 crc kubenswrapper[5002]: I0930 12:21:16.619859 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:16 crc kubenswrapper[5002]: I0930 12:21:16.619874 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:16Z","lastTransitionTime":"2025-09-30T12:21:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:16 crc kubenswrapper[5002]: I0930 12:21:16.675877 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 12:21:16 crc kubenswrapper[5002]: E0930 12:21:16.676026 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 12:21:16 crc kubenswrapper[5002]: I0930 12:21:16.676317 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 12:21:16 crc kubenswrapper[5002]: E0930 12:21:16.676382 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 12:21:16 crc kubenswrapper[5002]: I0930 12:21:16.676568 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 12:21:16 crc kubenswrapper[5002]: I0930 12:21:16.676725 5002 scope.go:117] "RemoveContainer" containerID="8fc2e06caaa38408efd9783fa165bede103d30f2b1d556762d633a3ad3bd3436" Sep 30 12:21:16 crc kubenswrapper[5002]: E0930 12:21:16.676757 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 12:21:16 crc kubenswrapper[5002]: I0930 12:21:16.696816 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:16Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:16 crc kubenswrapper[5002]: I0930 12:21:16.712743 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-p45pn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d0334cb8-cf38-4e50-80e8-2b81d8d46ae7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca035599120eed38b0abc1893076e4d1ecab6b1ce53605f53ff30fa2782b63de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2nvm4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:49Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-p45pn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-09-30T12:21:16Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:16 crc kubenswrapper[5002]: I0930 12:21:16.724028 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:16 crc kubenswrapper[5002]: I0930 12:21:16.724095 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:16 crc kubenswrapper[5002]: I0930 12:21:16.724113 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:16 crc kubenswrapper[5002]: I0930 12:21:16.724139 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:16 crc kubenswrapper[5002]: I0930 12:21:16.724155 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:16Z","lastTransitionTime":"2025-09-30T12:21:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:16 crc kubenswrapper[5002]: I0930 12:21:16.736913 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-2lqq2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7b7df259-4156-484c-bedd-543ca42f2970\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3e10fed923eacb0c5409835ec689ec74b4ef68b770774b3fe03e05db05d63c04\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6865c45580d6dc6acaf6dede9483213728b45f602a6df3a6d9264bbec520db66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f
8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6865c45580d6dc6acaf6dede9483213728b45f602a6df3a6d9264bbec520db66\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://042d2758098735539d73ee03bcaf5bfaaa765ef2337975341c63511bff33e4ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://042d2758098735539d73ee03bcaf5bfaaa765ef2337975341c63511bff33e4ec\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://037e6837e485e2662c487cea7b28bd28b85c3aa2dc698edd3d16b4269907dd83\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://037e6837e485e2662c487cea7b28bd28b85c3aa2dc698edd3d16b4269907dd83\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/e
tc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2a451f8cf7baf508ad6ceda7f19f3117804e5698e678bedf8e4187f60847aaac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2a451f8cf7baf508ad6ceda7f19f3117804e5698e678bedf8e4187f60847aaac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://edf6916be3680c65376e28fb893a869599eb07d870ac5337a38fed83b4dbf28d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://edf6916be3680c65376e28fb893a869599eb07d870ac5337a38fed83b4dbf28d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7019df241ab90e98201c474287f02919d99bca60c432a8f61ac68d29ce53bed8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7019df241ab90e98201c474287f02919d99bca60c432a8f61ac68d29ce53bed8\\\",\\\"exitC
ode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-2lqq2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:16Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:16 crc kubenswrapper[5002]: I0930 12:21:16.751573 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-dj2ln" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"68756e8a-d882-403f-acd7-2c41fce4446f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jdgr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jdgr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:21:04Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-dj2ln\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:16Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:16 crc kubenswrapper[5002]: I0930 12:21:16.768459 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"98317299-6ce9-4196-a387-becb81461d6a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6057b9d238b4d4dd1329f0ffdbfd88f2f8869c06e5eb681041229a15dd4a08dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://96f9b5d0a287ad67a6f1008d06b0d371a522722280353ed9001f81198b9295ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd0dba52ee6bddc02187198fe6c409abf3d336df93b558b27fc7f76e7ec8f2b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://84c3302e7daef1722f84b63d532dabe8e9f8cf5679478df9b92b73273b3a3000\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://84c3302e7daef1722f84b63d532dabe8e9f8cf5679478df9b92b73273b3a3000\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:27Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:26Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:16Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:16 crc kubenswrapper[5002]: I0930 12:21:16.786843 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://28d9568103a5257388af7a54e676246ab6ee732abd4516bd06e2cd7471f0ec08\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:16Z is after 
2025-08-24T17:21:41Z" Sep 30 12:21:16 crc kubenswrapper[5002]: I0930 12:21:16.801195 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97f2c4d225c4e4f2f04337915618c52ced7280fe45dc735fd9b1aca03f95f7f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:16Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:16 crc kubenswrapper[5002]: I0930 12:21:16.815888 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:16Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:16 crc kubenswrapper[5002]: I0930 12:21:16.825952 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hvvbg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c3b6a7b0-1c44-43ee-a789-36bb0cd51b87\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cb569d449d2d8d49ee8d62c4815b625481cd5ddd882a135dac7465fe63fbc67e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:21:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ffrqq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4537e7bf982dba689bd81821609884ac5f2530e6efa7d48cd65e05ff64d2ec6b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:21:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ffrqq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:21:02Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-hvvbg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:16Z is after 2025-08-24T17:21:41Z" Sep 30 
12:21:16 crc kubenswrapper[5002]: I0930 12:21:16.826565 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:16 crc kubenswrapper[5002]: I0930 12:21:16.826612 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:16 crc kubenswrapper[5002]: I0930 12:21:16.826629 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:16 crc kubenswrapper[5002]: I0930 12:21:16.826649 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:16 crc kubenswrapper[5002]: I0930 12:21:16.826663 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:16Z","lastTransitionTime":"2025-09-30T12:21:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:16 crc kubenswrapper[5002]: I0930 12:21:16.844800 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1d4eefd-8292-45a1-af4b-4a4906cccd2f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21a70de7ac636795f52b3bdd2a2b938494cabcf80e1fb9b321a309f4719edf4e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://240fb1d0e11ae7b3459121ba32094ddfa79dc71a1180befebcaf594859a63d86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b9009
2272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://91de7b5c3b52d03b05c151b66857b5af5f3b9d228f3f1af32a7f504225e1e739\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4cb1aca4e313653c2d83c0d4fb8dfab1e6f8c8220c806cbcdcf154f2d9ab6e29\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ef59c1ec617a61effd91da12779f13866a50fe611d078330f0275e8d6ef1c27e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3a814062d9554255b768b26ad452946c64836707ac0e68c9e3f107dc587c0ccb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\
":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3a814062d9554255b768b26ad452946c64836707ac0e68c9e3f107dc587c0ccb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://74db2c969d2f9049fd8e8130a9fd50b312f3fce049ab0e814390dbff3b0cb596\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://74db2c969d2f9049fd8e8130a9fd50b312f3fce049ab0e814390dbff3b0cb596\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://a367821c9b4b8799fb789ca39d487c519925da11ebb80e94a0123e02cf78964e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a367821c9b4b8799fb789ca39d487c519925da11ebb80e94a0123e02cf78964e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:26Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:16Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:16 crc kubenswrapper[5002]: I0930 12:21:16.857588 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1fe2fccc-790d-41b7-a322-0f99dbd9b3e2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a46e01897629261ad42fc3fa2cc7fdf56a2a020864a1088d9e58edf61aac296e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://017ebbf00a59d33f36e3515dd280bc0c4363da8b3e621339185ebdb3c8728148\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ee0dc371a43f3780e1178e29ac491c7a61786fccb37527dd2f21d800adbbdae\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ba33674850f0b121ea405e37924fcc085b3a3625e87432d9df38b2420920bf45\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://49c139b2c0ae558d108d47a8452c2a8348fc164761d468e1b9b98ff2c7407f20\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"message\\\":\\\"le observer\\\\nW0930 12:20:46.369838 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0930 12:20:46.370037 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 12:20:46.371113 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1995022772/tls.crt::/tmp/serving-cert-1995022772/tls.key\\\\\\\"\\\\nI0930 12:20:46.549991 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0930 12:20:46.558658 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0930 12:20:46.558692 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0930 12:20:46.558723 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0930 12:20:46.558735 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0930 12:20:46.572632 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0930 12:20:46.572689 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 12:20:46.572707 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 12:20:46.572718 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0930 12:20:46.572725 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0930 12:20:46.572737 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0930 12:20:46.572744 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0930 12:20:46.573045 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0930 12:20:46.574462 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:40Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:21:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b99e73bb34b117cdb12e2475fc8e7d0ffdcb7fc44a000282e7776473eca209ea\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://24b1bc36a80f3925846e76f49c3347f1e2f14555096b3a514b06c6eb2e8fe02c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://24b1bc36a80f3925846e76f49c3347f1e2f14555096b3a514b06c6eb2e8fe02c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:16Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:16 crc kubenswrapper[5002]: I0930 12:21:16.874084 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"90d5183f-90e1-421f-bd64-fd7f05605586\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d2e0f074c0f31a5432cea24faa4f95c276f59cdf48bca8ceecd065b4cdff5d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://902cc950dc462d86c9de62cac5d04e99aa3240952f43e8a5f07884e70ff84b56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://047c48f1e175cddc7dc981a8d9027783f4a90c21cb444050771de50fb02e7a9e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f45ac789dc45e907d68fe54ec7bc2ea72dbcc5f46cd55c113c014203d5e709d9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:16Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:16 crc kubenswrapper[5002]: I0930 12:21:16.886158 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-ls6n9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2ebe21b2-eb85-4b30-b911-78f6619d07f9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e83e79da4b7b8f89a61048c7583c9ff7ccb7ac910744dfbb572c189c5676741\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-frsmm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase
\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:52Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-ls6n9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:16Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:16 crc kubenswrapper[5002]: I0930 12:21:16.899579 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-ttfn8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2bd9f18d-bfb3-4bd7-9a87-242029cd3200\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0809a587f4b73c4a5e632e8db2cc0d4a957eded6da9a771cf0b53e4862de54e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPa
th\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lvf2n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-ttfn8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:16Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:16 crc kubenswrapper[5002]: I0930 12:21:16.911893 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"341a55c6-78d3-4fa2-8f47-b56fd41fa1c1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7bc40360d8d17f580c7ded1e33762aa443998e4202892e2f2effc08b6659a4a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbwzb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6868404a7219f8dbbc4e7541e5f80402c169808daea81b5f94546472cb8e70a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-con
fig-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbwzb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:50Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-ncbb5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:16Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:16 crc kubenswrapper[5002]: I0930 12:21:16.928529 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:16 crc kubenswrapper[5002]: I0930 12:21:16.928566 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:16 crc kubenswrapper[5002]: I0930 12:21:16.928577 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:16 crc kubenswrapper[5002]: I0930 12:21:16.928595 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:16 crc kubenswrapper[5002]: I0930 12:21:16.928607 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:16Z","lastTransitionTime":"2025-09-30T12:21:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 12:21:16 crc kubenswrapper[5002]: I0930 12:21:16.934045 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4pvsr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7095aa7a-d067-4977-bdc5-3a45a52a6a39\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b95267743f525eb5c8712304a1dc7afe4403b2065c0a3b594d95dcba6348170\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://89dbc048c1483376e11a6754384e9ea154767a88f7dfb3463809a2554bb142b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://d41b04f928f3b4423bf4aa1babfd6292b749752d31ac57771b36bed503312d3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ceb946c94ca247c05c57001754a73122b1bb98a98d886df9d64b2e4a003a4cf6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfbe7b62a4f0a00896ac4608d70c4ef6dc7675e0312f85ca181fcf1087fe73e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a65c3c1af3f1ee07c2f5350011d1f5eb61dfe0b21ff13d7326373d65ffe603c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8fc2e06caaa38408efd9783fa165bede103d30f2b1d556762d633a3ad3bd3436\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8fc2e06caaa38408efd9783fa165bede103d30f2b1d556762d633a3ad3bd3436\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T12:21:02Z\\\",\\\"message\\\":\\\" stale LBs for map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-multus/multus-admission-controller\\\\\\\"}\\\\nI0930 12:21:01.858370 6485 services_controller.go:360] Finished syncing service multus-admission-controller on namespace openshift-multus for network=default : 12.396713ms\\\\nI0930 12:21:01.858856 6485 obj_retry.go:551] Creating *factory.egressNode crc took: 15.122418ms\\\\nI0930 12:21:01.858901 6485 factory.go:1336] Added *v1.Node event handler 7\\\\nI0930 12:21:01.858970 6485 factory.go:1336] Added *v1.EgressIP event handler 8\\\\nI0930 12:21:01.858989 6485 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0930 12:21:01.859004 6485 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0930 12:21:01.859053 6485 handler.go:208] Removed *v1.Node event handler 2\\\\nI0930 12:21:01.859065 6485 factory.go:656] Stopping watch factory\\\\nI0930 12:21:01.859090 6485 handler.go:208] Removed *v1.Node event handler 7\\\\nI0930 12:21:01.859506 6485 factory.go:1336] Added *v1.EgressFirewall event handler 9\\\\nI0930 12:21:01.859639 6485 controller.go:132] Adding controller ef_node_controller event handlers\\\\nI0930 12:21:01.859673 6485 ovnkube.go:599] Stopped ovnkube\\\\nI0930 12:21:01.859702 6485 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF0930 12:21:01.859768 6485 ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T12:21:01Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed 
container=ovnkube-controller pod=ovnkube-node-4pvsr_openshift-ovn-kubernetes(7095aa7a-d067-4977-bdc5-3a45a52a6a39)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ce6227a7ac3ced1fbd4814039859e25ff52f314aa8479275f1bfc49b04cf411\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29ad21e0858ff157cc785f60f949fb08082938850870cc2c5198447f1e1d9253\\\",\\\"image\\\":\
\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://29ad21e0858ff157cc785f60f949fb08082938850870cc2c5198447f1e1d9253\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:50Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4pvsr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:16Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:16 crc kubenswrapper[5002]: I0930 12:21:16.949607 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://30f55f9a0bcab13420179c62f63cd5e785afe964ac86286450fcd91d7ca0562e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://306dd7840789c5fe2565d819a744a57e330cb1fcc1ab1ca4b3c5ebcfd0361d3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d20
99482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:16Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:16 crc kubenswrapper[5002]: I0930 12:21:16.962020 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:16Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:17 crc kubenswrapper[5002]: I0930 12:21:17.031731 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:17 crc kubenswrapper[5002]: I0930 12:21:17.031772 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:17 crc kubenswrapper[5002]: I0930 12:21:17.031786 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:17 crc kubenswrapper[5002]: I0930 12:21:17.031802 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:17 crc kubenswrapper[5002]: I0930 12:21:17.031816 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:17Z","lastTransitionTime":"2025-09-30T12:21:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:17 crc kubenswrapper[5002]: I0930 12:21:17.133629 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:17 crc kubenswrapper[5002]: I0930 12:21:17.133682 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:17 crc kubenswrapper[5002]: I0930 12:21:17.133697 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:17 crc kubenswrapper[5002]: I0930 12:21:17.133717 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:17 crc kubenswrapper[5002]: I0930 12:21:17.133731 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:17Z","lastTransitionTime":"2025-09-30T12:21:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 12:21:17 crc kubenswrapper[5002]: I0930 12:21:17.159988 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-4pvsr_7095aa7a-d067-4977-bdc5-3a45a52a6a39/ovnkube-controller/1.log" Sep 30 12:21:17 crc kubenswrapper[5002]: I0930 12:21:17.162331 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4pvsr" event={"ID":"7095aa7a-d067-4977-bdc5-3a45a52a6a39","Type":"ContainerStarted","Data":"4e8af62b8de35a0c80cb0fafe3fc81375703e08d812d830cc738ee9bf7c102ee"} Sep 30 12:21:17 crc kubenswrapper[5002]: I0930 12:21:17.163372 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-4pvsr" Sep 30 12:21:17 crc kubenswrapper[5002]: I0930 12:21:17.187494 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://30f55f9a0bcab13420179c62f63cd5e785afe964ac86286450fcd91d7ca0562e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://306dd7840789c5fe2565d819a744a57e330cb1fcc1ab1ca4b3c5ebcfd0361d3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\
",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:17Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:17 crc kubenswrapper[5002]: I0930 12:21:17.205315 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:17Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:17 crc kubenswrapper[5002]: I0930 12:21:17.216822 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-ttfn8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2bd9f18d-bfb3-4bd7-9a87-242029cd3200\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0809a587f4b73c4a5e632e8db2cc0d4a957eded6da9a771cf0b53e4862de54e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mo
untPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lvf2n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-ttfn8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:17Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:17 crc kubenswrapper[5002]: I0930 12:21:17.227954 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"341a55c6-78d3-4fa2-8f47-b56fd41fa1c1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7bc40360d8d17f580c7ded1e33762aa443998e4202892e2f2effc08b6659a4a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbwzb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-
o://6868404a7219f8dbbc4e7541e5f80402c169808daea81b5f94546472cb8e70a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbwzb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:50Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-ncbb5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:17Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:17 crc kubenswrapper[5002]: I0930 12:21:17.236021 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:17 crc kubenswrapper[5002]: I0930 12:21:17.236058 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:17 crc kubenswrapper[5002]: I0930 12:21:17.236066 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:17 crc kubenswrapper[5002]: I0930 12:21:17.236080 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:17 crc kubenswrapper[5002]: I0930 12:21:17.236089 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:17Z","lastTransitionTime":"2025-09-30T12:21:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 12:21:17 crc kubenswrapper[5002]: I0930 12:21:17.245927 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4pvsr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7095aa7a-d067-4977-bdc5-3a45a52a6a39\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b95267743f525eb5c8712304a1dc7afe4403b2065c0a3b594d95dcba6348170\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://89dbc048c1483376e11a6754384e9ea154767a88f7dfb3463809a2554bb142b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://d41b04f928f3b4423bf4aa1babfd6292b749752d31ac57771b36bed503312d3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ceb946c94ca247c05c57001754a73122b1bb98a98d886df9d64b2e4a003a4cf6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfbe7b62a4f0a00896ac4608d70c4ef6dc7675e0312f85ca181fcf1087fe73e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a65c3c1af3f1ee07c2f5350011d1f5eb61dfe0b21ff13d7326373d65ffe603c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4e8af62b8de35a0c80cb0fafe3fc81375703e08d812d830cc738ee9bf7c102ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8fc2e06caaa38408efd9783fa165bede103d30f2b1d556762d633a3ad3bd3436\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T12:21:02Z\\\",\\\"message\\\":\\\" stale LBs for map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-multus/multus-admission-controller\\\\\\\"}\\\\nI0930 12:21:01.858370 6485 services_controller.go:360] Finished syncing service multus-admission-controller on namespace openshift-multus for network=default : 12.396713ms\\\\nI0930 12:21:01.858856 6485 obj_retry.go:551] Creating *factory.egressNode crc took: 15.122418ms\\\\nI0930 12:21:01.858901 6485 factory.go:1336] Added *v1.Node event handler 7\\\\nI0930 12:21:01.858970 6485 factory.go:1336] Added *v1.EgressIP event handler 8\\\\nI0930 12:21:01.858989 6485 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0930 12:21:01.859004 6485 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0930 12:21:01.859053 6485 handler.go:208] Removed *v1.Node event handler 2\\\\nI0930 12:21:01.859065 6485 factory.go:656] Stopping watch factory\\\\nI0930 12:21:01.859090 6485 handler.go:208] Removed *v1.Node event handler 7\\\\nI0930 12:21:01.859506 6485 factory.go:1336] Added *v1.EgressFirewall event handler 9\\\\nI0930 12:21:01.859639 6485 controller.go:132] Adding controller ef_node_controller event handlers\\\\nI0930 12:21:01.859673 6485 ovnkube.go:599] Stopped ovnkube\\\\nI0930 12:21:01.859702 6485 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF0930 12:21:01.859768 6485 
ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T12:21:01Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:21:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ce6227a7ac3ced1fbd4814039859e25ff52f314aa8479275f1bfc49b04cf411\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":
[{\\\"containerID\\\":\\\"cri-o://29ad21e0858ff157cc785f60f949fb08082938850870cc2c5198447f1e1d9253\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://29ad21e0858ff157cc785f60f949fb08082938850870cc2c5198447f1e1d9253\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:50Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4pvsr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:17Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:17 crc kubenswrapper[5002]: I0930 12:21:17.256277 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-dj2ln" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"68756e8a-d882-403f-acd7-2c41fce4446f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jdgr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jdgr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:21:04Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-dj2ln\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:17Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:17 crc kubenswrapper[5002]: I0930 12:21:17.267089 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"98317299-6ce9-4196-a387-becb81461d6a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6057b9d238b4d4dd1329f0ffdbfd88f2f8869c06e5eb681041229a15dd4a08dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://96f9b5d0a287ad67a6f1008d06b0d371a522722280353ed9001f81198b9295ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd0dba52ee6bddc02187198fe6c409abf3d336df93b558b27fc7f76e7ec8f2b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://84c3302e7daef1722f84b63d532dabe8e9f8cf5679478df9b92b73273b3a3000\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://84c3302e7daef1722f84b63d532dabe8e9f8cf5679478df9b92b73273b3a3000\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:27Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:26Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:17Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:17 crc kubenswrapper[5002]: I0930 12:21:17.277733 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:17Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:17 crc kubenswrapper[5002]: I0930 12:21:17.286581 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-p45pn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d0334cb8-cf38-4e50-80e8-2b81d8d46ae7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca035599120eed38b0abc1893076e4d1ecab6b1ce53605f53ff30fa2782b63de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2nvm4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:49Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-p45pn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-09-30T12:21:17Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:17 crc kubenswrapper[5002]: I0930 12:21:17.300325 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-2lqq2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7b7df259-4156-484c-bedd-543ca42f2970\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3e10fed923eacb0c5409835ec689ec74b4ef68b770774b3fe03e05db05d63c04\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6865c45580d6dc6acaf6dede9483213728b45f602a6df3a6d9264bbec520db66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6865c45580d6dc6acaf6dede9483213728b45f602a6df3a6d9264bbec520db66\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://042d2758098735539d73ee03bcaf5bfaaa765ef2337975341c
63511bff33e4ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://042d2758098735539d73ee03bcaf5bfaaa765ef2337975341c63511bff33e4ec\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://037e6837e485e2662c487cea7b28bd28b85c3aa2dc698edd3d16b4269907dd83\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://037e6837e485e2662c487cea7b28bd28b85c3aa2dc698edd3d16b4269907dd83\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2a451f8cf7baf508ad6ceda7f19f3117804e5698e678bedf8e4187f60847aaac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2a451f8cf7baf508ad6ceda7f19f3117804e5698e678bedf8e4187f60847aaac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-
copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://edf6916be3680c65376e28fb893a869599eb07d870ac5337a38fed83b4dbf28d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://edf6916be3680c65376e28fb893a869599eb07d870ac5337a38fed83b4dbf28d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7019df241ab90e98201c474287f02919d99bca60c432a8f61ac68d29ce53bed8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7019df241ab90e98201c474287f02919d99bca60c432a8f61ac68d29ce53bed8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-2lqq2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:17Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:17 crc kubenswrapper[5002]: I0930 12:21:17.311644 5002 status_manager.go:875] "Failed to update status for 
pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hvvbg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c3b6a7b0-1c44-43ee-a789-36bb0cd51b87\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cb569d449d2d8d49ee8d62c4815b625481cd5ddd882a135dac7465fe63fbc67e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:21:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ffrqq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4537e7bf982dba689bd81821609884ac5f2530e6efa7d48cd65e05ff64d2ec6b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:21:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ffrqq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:21:02Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-hvvbg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2025-09-30T12:21:17Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:17 crc kubenswrapper[5002]: I0930 12:21:17.331493 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1d4eefd-8292-45a1-af4b-4a4906cccd2f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21a70de7ac636795f52b3bdd2a2b938494cabcf80e1fb9b321a309f4719edf4e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://240fb1d0e11ae7b3459121ba32094ddfa79dc71a1180befebcaf594859a63d86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://91de7b5c3b52d03b05c151b66857b5af5f3b9d228f3f1af32a7f504225e1e739\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-3
0T12:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4cb1aca4e313653c2d83c0d4fb8dfab1e6f8c8220c806cbcdcf154f2d9ab6e29\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ef59c1ec617a61effd91da12779f13866a50fe611d078330f0275e8d6ef1c27e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3a814062d9554255b768b26ad452946c64836707ac0e68c9e3f107dc587c0ccb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3a814062d9554255b768b26ad452946c64836707ac0e68c9e3f107dc587c0ccb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://74db2c969d2f9049fd8e8130a9fd50b312f3fce049ab0e814390dbff3b0cb596\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://74db2c969d2f9049fd8e8130a9fd50b312f3fce049a
b0e814390dbff3b0cb596\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://a367821c9b4b8799fb789ca39d487c519925da11ebb80e94a0123e02cf78964e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a367821c9b4b8799fb789ca39d487c519925da11ebb80e94a0123e02cf78964e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:26Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:17Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:17 crc kubenswrapper[5002]: I0930 12:21:17.341558 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:17 crc kubenswrapper[5002]: I0930 12:21:17.341603 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:17 crc kubenswrapper[5002]: I0930 12:21:17.341624 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:17 crc kubenswrapper[5002]: I0930 12:21:17.341642 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:17 crc kubenswrapper[5002]: I0930 12:21:17.341653 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:17Z","lastTransitionTime":"2025-09-30T12:21:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 12:21:17 crc kubenswrapper[5002]: I0930 12:21:17.346331 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1fe2fccc-790d-41b7-a322-0f99dbd9b3e2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a46e01897629261ad42fc3fa2cc7fdf56a2a020864a1088d9e58edf61aac296e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://017ebbf00a59d33f36e3515dd280bc0c4363da8b3e621339185ebdb3c8728148\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ee0dc371a43f3780e1178e29ac491c7a61786fccb37527dd2f21d800adbbdae\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartC
ount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ba33674850f0b121ea405e37924fcc085b3a3625e87432d9df38b2420920bf45\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://49c139b2c0ae558d108d47a8452c2a8348fc164761d468e1b9b98ff2c7407f20\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"message\\\":\\\"le observer\\\\nW0930 12:20:46.369838 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0930 12:20:46.370037 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 12:20:46.371113 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1995022772/tls.crt::/tmp/serving-cert-1995022772/tls.key\\\\\\\"\\\\nI0930 12:20:46.549991 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0930 12:20:46.558658 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0930 12:20:46.558692 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0930 12:20:46.558723 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0930 12:20:46.558735 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0930 12:20:46.572632 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0930 12:20:46.572689 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 12:20:46.572707 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 12:20:46.572718 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0930 12:20:46.572725 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0930 12:20:46.572737 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0930 12:20:46.572744 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0930 12:20:46.573045 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0930 12:20:46.574462 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:40Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:21:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b99e73bb34b117cdb12e2475fc8e7d0ffdcb7fc44a000282e7776473eca209ea\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://24b1bc36a80f3925846e76f49c3347f1e2f14555096b3a514b06c6eb2e8fe02c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://24b1bc36a80f3925846e76f49c3347f1e2f14555096b3a514b06c6eb2e8fe02c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:17Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:17 crc kubenswrapper[5002]: I0930 12:21:17.361041 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://28d9568103a5257388af7a54e676246ab6ee732abd4516bd06e2cd7471f0ec08\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:17Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:17 crc kubenswrapper[5002]: I0930 12:21:17.372603 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97f2c4d225c4e4f2f04337915618c52ced7280fe45dc735fd9b1aca03f95f7f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:17Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:17 crc kubenswrapper[5002]: I0930 12:21:17.383149 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:17Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:17 crc kubenswrapper[5002]: I0930 12:21:17.394070 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"90d5183f-90e1-421f-bd64-fd7f05605586\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d2e0f074c0f31a5432cea24faa4f95c276f59cdf48bca8ceecd065b4cdff5d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://902cc950dc462d86c9de62cac5d04e99aa3240952f43e8a5f07884e70ff84b56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"
2025-09-30T12:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://047c48f1e175cddc7dc981a8d9027783f4a90c21cb444050771de50fb02e7a9e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f45ac789dc45e907d68fe54ec7bc2ea72dbcc5f46cd55c113c014203d5e709d9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:17Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:17 crc kubenswrapper[5002]: I0930 12:21:17.402633 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-ls6n9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2ebe21b2-eb85-4b30-b911-78f6619d07f9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e83e79da4b7b8f89a61048c7583c9ff7ccb7ac910744dfbb572c189c5676741\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-frsmm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:52Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-ls6n9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:17Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:17 crc kubenswrapper[5002]: I0930 12:21:17.443036 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:17 crc kubenswrapper[5002]: I0930 12:21:17.443070 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:17 crc kubenswrapper[5002]: I0930 12:21:17.443079 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:17 crc kubenswrapper[5002]: I0930 12:21:17.443092 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:17 crc kubenswrapper[5002]: I0930 12:21:17.443102 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:17Z","lastTransitionTime":"2025-09-30T12:21:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:17 crc kubenswrapper[5002]: I0930 12:21:17.546212 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:17 crc kubenswrapper[5002]: I0930 12:21:17.546282 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:17 crc kubenswrapper[5002]: I0930 12:21:17.546300 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:17 crc kubenswrapper[5002]: I0930 12:21:17.546323 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:17 crc kubenswrapper[5002]: I0930 12:21:17.546340 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:17Z","lastTransitionTime":"2025-09-30T12:21:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:17 crc kubenswrapper[5002]: I0930 12:21:17.648746 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:17 crc kubenswrapper[5002]: I0930 12:21:17.648785 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:17 crc kubenswrapper[5002]: I0930 12:21:17.648796 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:17 crc kubenswrapper[5002]: I0930 12:21:17.648811 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:17 crc kubenswrapper[5002]: I0930 12:21:17.648822 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:17Z","lastTransitionTime":"2025-09-30T12:21:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:17 crc kubenswrapper[5002]: I0930 12:21:17.675007 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-dj2ln" Sep 30 12:21:17 crc kubenswrapper[5002]: E0930 12:21:17.675115 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-dj2ln" podUID="68756e8a-d882-403f-acd7-2c41fce4446f" Sep 30 12:21:17 crc kubenswrapper[5002]: I0930 12:21:17.751364 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:17 crc kubenswrapper[5002]: I0930 12:21:17.751438 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:17 crc kubenswrapper[5002]: I0930 12:21:17.751458 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:17 crc kubenswrapper[5002]: I0930 12:21:17.751508 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:17 crc kubenswrapper[5002]: I0930 12:21:17.751530 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:17Z","lastTransitionTime":"2025-09-30T12:21:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:17 crc kubenswrapper[5002]: I0930 12:21:17.854430 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:17 crc kubenswrapper[5002]: I0930 12:21:17.854541 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:17 crc kubenswrapper[5002]: I0930 12:21:17.854570 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:17 crc kubenswrapper[5002]: I0930 12:21:17.854599 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:17 crc kubenswrapper[5002]: I0930 12:21:17.854617 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:17Z","lastTransitionTime":"2025-09-30T12:21:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 12:21:17 crc kubenswrapper[5002]: I0930 12:21:17.957613 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:17 crc kubenswrapper[5002]: I0930 12:21:17.957674 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:17 crc kubenswrapper[5002]: I0930 12:21:17.957684 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:17 crc kubenswrapper[5002]: I0930 12:21:17.957701 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:17 crc kubenswrapper[5002]: I0930 12:21:17.957712 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:17Z","lastTransitionTime":"2025-09-30T12:21:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:18 crc kubenswrapper[5002]: I0930 12:21:18.060457 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:18 crc kubenswrapper[5002]: I0930 12:21:18.060548 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:18 crc kubenswrapper[5002]: I0930 12:21:18.060567 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:18 crc kubenswrapper[5002]: I0930 12:21:18.060612 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:18 crc kubenswrapper[5002]: I0930 12:21:18.060630 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:18Z","lastTransitionTime":"2025-09-30T12:21:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:18 crc kubenswrapper[5002]: I0930 12:21:18.165234 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:18 crc kubenswrapper[5002]: I0930 12:21:18.165513 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:18 crc kubenswrapper[5002]: I0930 12:21:18.165543 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:18 crc kubenswrapper[5002]: I0930 12:21:18.165571 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:18 crc kubenswrapper[5002]: I0930 12:21:18.165594 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:18Z","lastTransitionTime":"2025-09-30T12:21:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 12:21:18 crc kubenswrapper[5002]: I0930 12:21:18.168874 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-4pvsr_7095aa7a-d067-4977-bdc5-3a45a52a6a39/ovnkube-controller/2.log" Sep 30 12:21:18 crc kubenswrapper[5002]: I0930 12:21:18.169984 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-4pvsr_7095aa7a-d067-4977-bdc5-3a45a52a6a39/ovnkube-controller/1.log" Sep 30 12:21:18 crc kubenswrapper[5002]: I0930 12:21:18.174448 5002 generic.go:334] "Generic (PLEG): container finished" podID="7095aa7a-d067-4977-bdc5-3a45a52a6a39" containerID="4e8af62b8de35a0c80cb0fafe3fc81375703e08d812d830cc738ee9bf7c102ee" exitCode=1 Sep 30 12:21:18 crc kubenswrapper[5002]: I0930 12:21:18.174544 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4pvsr" event={"ID":"7095aa7a-d067-4977-bdc5-3a45a52a6a39","Type":"ContainerDied","Data":"4e8af62b8de35a0c80cb0fafe3fc81375703e08d812d830cc738ee9bf7c102ee"} Sep 30 12:21:18 crc kubenswrapper[5002]: I0930 12:21:18.174599 5002 scope.go:117] "RemoveContainer" containerID="8fc2e06caaa38408efd9783fa165bede103d30f2b1d556762d633a3ad3bd3436" Sep 30 12:21:18 crc kubenswrapper[5002]: I0930 12:21:18.176319 5002 scope.go:117] "RemoveContainer" containerID="4e8af62b8de35a0c80cb0fafe3fc81375703e08d812d830cc738ee9bf7c102ee" Sep 30 12:21:18 crc kubenswrapper[5002]: E0930 12:21:18.176775 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-4pvsr_openshift-ovn-kubernetes(7095aa7a-d067-4977-bdc5-3a45a52a6a39)\"" pod="openshift-ovn-kubernetes/ovnkube-node-4pvsr" podUID="7095aa7a-d067-4977-bdc5-3a45a52a6a39" Sep 30 12:21:18 crc kubenswrapper[5002]: I0930 12:21:18.193362 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1fe2fccc-790d-41b7-a322-0f99dbd9b3e2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a46e01897629261ad42fc3fa2cc7fdf56a2a020864a1088d9e58edf61aac296e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://017ebbf00a59d33f36e3515dd280bc0c4363da8b3e621339185ebdb3c8728148\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ee0dc371a43f3780e1178e29ac491c7a61786fccb37527dd2f21d800adbbdae\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ba33674850f0b121ea405e37924fcc085b3a3625e87432d9df38b2420920bf45\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://49c139b2c0ae558d108d47a8452c2a8348fc164761d468e1b9b98ff2c7407f20\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"message\\\":\\\"le observer\\\\nW0930 12:20:46.369838 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0930 12:20:46.370037 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 12:20:46.371113 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1995022772/tls.crt::/tmp/serving-cert-1995022772/tls.key\\\\\\\"\\\\nI0930 12:20:46.549991 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0930 12:20:46.558658 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0930 12:20:46.558692 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0930 12:20:46.558723 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0930 12:20:46.558735 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0930 12:20:46.572632 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0930 12:20:46.572689 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 12:20:46.572707 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 12:20:46.572718 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0930 12:20:46.572725 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0930 12:20:46.572737 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0930 12:20:46.572744 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0930 12:20:46.573045 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0930 12:20:46.574462 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:40Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:21:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b99e73bb34b117cdb12e2475fc8e7d0ffdcb7fc44a000282e7776473eca209ea\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://24b1bc36a80f3925846e76f49c3347f1e2f14555096b3a514b06c6eb2e8fe02c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://24b1bc36a80f3925846e76f49c3347f1e2f14555096b3a514b06c6eb2e8fe02c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:18Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:18 crc kubenswrapper[5002]: I0930 12:21:18.214855 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://28d9568103a5257388af7a54e676246ab6ee732abd4516bd06e2cd7471f0ec08\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:18Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:18 crc kubenswrapper[5002]: I0930 12:21:18.231937 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97f2c4d225c4e4f2f04337915618c52ced7280fe45dc735fd9b1aca03f95f7f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:18Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:18 crc kubenswrapper[5002]: I0930 12:21:18.246296 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:18Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:18 crc kubenswrapper[5002]: I0930 12:21:18.261864 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hvvbg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c3b6a7b0-1c44-43ee-a789-36bb0cd51b87\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cb569d449d2d8d49ee8d62c4815b625481cd5ddd882a135dac7465fe63fbc67e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:21:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ffrqq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4537e7bf982dba689bd81821609884ac5f2530e6efa7d48cd65e05ff64d2ec6b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2
099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:21:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ffrqq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:21:02Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-hvvbg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:18Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:18 crc kubenswrapper[5002]: I0930 12:21:18.268519 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:18 crc kubenswrapper[5002]: I0930 12:21:18.268569 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:18 crc kubenswrapper[5002]: I0930 12:21:18.268585 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:18 crc kubenswrapper[5002]: I0930 12:21:18.268611 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:18 crc kubenswrapper[5002]: I0930 12:21:18.268630 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:18Z","lastTransitionTime":"2025-09-30T12:21:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 12:21:18 crc kubenswrapper[5002]: I0930 12:21:18.292578 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1d4eefd-8292-45a1-af4b-4a4906cccd2f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21a70de7ac636795f52b3bdd2a2b938494cabcf80e1fb9b321a309f4719edf4e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://240fb1d0e11ae7b3459121ba32094ddfa79dc71a1180befebcaf594859a63d86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://91de7b5c3b52d03b05c151b66857b5af5f3b9d228f3f1af32a7f504225e1e739\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4cb1aca4e313653c2d83c0d4fb8dfab1e6f8c8220c806cbcdcf154f2d9ab6e29\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ef59c1ec617a61effd91da12779f13866a50fe611d078330f0275e8d6ef1c27e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3a814062d9554255b768b26ad452946c64836707ac0e68c9e3f107dc587c0ccb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3a814062d9554255b768b26ad452946c64836707ac0e68c9e3f107dc587c0ccb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://74db2c969d2f9049fd8e8130a9fd50b312f3fce049ab0e814390dbff3b0cb596\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://74db2c969d2f9049fd8e8130a9fd50b312f3fce049ab0e814390dbff3b0cb596\\\",\\\"exitCode\\\":0,\\\"finished
At\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://a367821c9b4b8799fb789ca39d487c519925da11ebb80e94a0123e02cf78964e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a367821c9b4b8799fb789ca39d487c519925da11ebb80e94a0123e02cf78964e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:26Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:18Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:18 crc kubenswrapper[5002]: I0930 12:21:18.307948 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-ls6n9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2ebe21b2-eb85-4b30-b911-78f6619d07f9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e83e79da4b7b8f89a61048c7583c9ff7ccb7ac910744dfbb572c189c5676741\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-frsmm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:52Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-ls6n9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:18Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:18 crc kubenswrapper[5002]: I0930 12:21:18.326463 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"90d5183f-90e1-421f-bd64-fd7f05605586\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d2e0f074c0f31a5432cea24faa4f95c276f59cdf48bca8ceecd065b4cdff5d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://902cc950dc462d86c9de62cac5d04e99aa3240952f43e8a5f07884e70ff84b56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://047c48f1e175cddc7dc981a8d9027783f4a90c21cb444050771de50fb02e7a9e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f45ac789dc45e907d68fe54ec7bc2ea72dbcc5f46cd55c113c014203d5e709d9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:18Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:18 crc kubenswrapper[5002]: I0930 12:21:18.341748 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:18Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:18 crc kubenswrapper[5002]: I0930 12:21:18.357423 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-ttfn8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2bd9f18d-bfb3-4bd7-9a87-242029cd3200\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0809a587f4b73c4a5e632e8db2cc0d4a957eded6da9a771cf0b53e4862de54e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mo
untPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lvf2n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-ttfn8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:18Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:18 crc kubenswrapper[5002]: I0930 12:21:18.371396 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:18 crc kubenswrapper[5002]: I0930 12:21:18.371458 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:18 crc kubenswrapper[5002]: I0930 12:21:18.371515 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:18 crc kubenswrapper[5002]: I0930 12:21:18.371548 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:18 crc kubenswrapper[5002]: I0930 12:21:18.371573 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:18Z","lastTransitionTime":"2025-09-30T12:21:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 12:21:18 crc kubenswrapper[5002]: I0930 12:21:18.373633 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"341a55c6-78d3-4fa2-8f47-b56fd41fa1c1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7bc40360d8d17f580c7ded1e33762aa443998e4202892e2f2effc08b6659a4a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbwzb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6868404a7219f8dbbc4e7541e5f80402c169808daea81b5f94546472cb8e70a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbwzb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:50Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-ncbb5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:18Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:18 crc kubenswrapper[5002]: I0930 12:21:18.402629 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4pvsr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7095aa7a-d067-4977-bdc5-3a45a52a6a39\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b95267743f525eb5c8712304a1dc7afe4403b2065c0a3b594d95dcba6348170\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://89dbc048c1483376e11a6754384e9ea154767a88f7dfb3463809a2554bb142b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kuber
netes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d41b04f928f3b4423bf4aa1babfd6292b749752d31ac57771b36bed503312d3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ceb946c94ca247c05c57001754a73122b1bb98a98d886df9d64b2e4a003a4cf6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfbe7b62a4f0a00896ac4608d70c4ef6dc7675e0312f85ca181fcf1087fe73e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a65c3c1af3f1ee07c2f5350011d1f5eb61dfe0b21ff13d7326373d65ffe603c9\\\",\\\"image
\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4e8af62b8de35a0c80cb0fafe3fc81375703e08d812d830cc738ee9bf7c102ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8fc2e06caaa38408efd9783fa165bede103d30f2b1d556762d633a3ad3bd3436\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T12:21:02Z\\\",\\\"message\\\":\\\" stale LBs for map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-multus/multus-admission-controller\\\\\\\"}\\\\nI0930 12:21:01.858370 6485 services_controller.go:360] Finished syncing service multus-admission-controller on namespace openshift-multus for network=default : 12.396713ms\\\\nI0930 12:21:01.858856 6485 obj_retry.go:551] Creating *factory.egressNode crc took: 15.122418ms\\\\nI0930 12:21:01.858901 6485 factory.go:1336] Added *v1.Node event handler 7\\\\nI0930 12:21:01.858970 6485 factory.go:1336] Added *v1.EgressIP event handler 8\\\\nI0930 12:21:01.858989 6485 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0930 12:21:01.859004 6485 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0930 12:21:01.859053 6485 handler.go:208] Removed *v1.Node event handler 2\\\\nI0930 12:21:01.859065 6485 factory.go:656] Stopping watch factory\\\\nI0930 12:21:01.859090 6485 handler.go:208] Removed *v1.Node event handler 7\\\\nI0930 12:21:01.859506 6485 factory.go:1336] Added *v1.EgressFirewall event handler 9\\\\nI0930 12:21:01.859639 6485 controller.go:132] Adding controller ef_node_controller event handlers\\\\nI0930 12:21:01.859673 6485 ovnkube.go:599] Stopped ovnkube\\\\nI0930 12:21:01.859702 6485 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF0930 12:21:01.859768 6485 
ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T12:21:01Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4e8af62b8de35a0c80cb0fafe3fc81375703e08d812d830cc738ee9bf7c102ee\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T12:21:17Z\\\",\\\"message\\\":\\\"rc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:17Z is after 2025-08-24T17:21:41Z]\\\\nI0930 12:21:17.550120 6732 transact.go:42] Configuring OVN: [{Op:update Table:Logical_Switch_Port Row:map[addresses:{GoSet:[0a:58:0a:d9:00:5c 10.217.0.92]} options:{GoMap:map[iface-id-ver:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 requested-chassis:crc]} port_security:{GoSet:[0a:58:0a:d9:00:5c 10.217.0.92]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {c94130be-172c-477c-88c4-40cc7eba30fe}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:} {Op:mutate Table:Logical_Switch Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:c94130be-172c-477c-88c4-40cc7eba30fe}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {7e8bb06a-06a5-45bc-a752-26a17d322811}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:} {Op:mutate Table:Port_Group Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:c94130be-172c-477c-88c4-40cc7eba30fe}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == 
{eb8eef51-1a8d-43f9-ae2e-3b2cc00ded\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T12:21:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ce6227a7ac3ced1fbd4814039859e25ff52f314aa8479275f1bfc49b04cf411\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29ad21e0858ff157cc785f60f949fb08082938850870cc2c5198447f1e1d9253\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-a
rt-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://29ad21e0858ff157cc785f60f949fb08082938850870cc2c5198447f1e1d9253\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:50Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4pvsr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:18Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:18 crc kubenswrapper[5002]: I0930 12:21:18.418174 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://30f55f9a0bcab13420179c62f63cd5e785afe964ac86286450fcd91d7ca0562e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://306dd7840789c5fe2565d819a744a57e330cb1fcc1ab1ca4b3c5ebcfd0361d3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41a
c2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:18Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:18 crc kubenswrapper[5002]: I0930 12:21:18.434612 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"98317299-6ce9-4196-a387-becb81461d6a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6057b9d238b4d4dd1329f0ffdbfd88f2f8869c06e5eb681041229a15dd4a08dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://96f9b5d0a287ad67a6f1008d06b0d371a522722280353ed9001f81198b9295ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\
\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd0dba52ee6bddc02187198fe6c409abf3d336df93b558b27fc7f76e7ec8f2b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://84c3302e7daef1722f84b63d532dabe8e9f8cf5679478df9b92b73273b3a3000\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://84c3302e7daef1722f84b63d532dabe8e9f8cf5679478df9b92b73273b3a3000\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:27Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:26Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:18Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:18 crc kubenswrapper[5002]: I0930 12:21:18.448104 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:18Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:18 crc kubenswrapper[5002]: I0930 12:21:18.460339 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-p45pn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d0334cb8-cf38-4e50-80e8-2b81d8d46ae7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca035599120eed38b0abc1893076e4d1ecab6b1ce53605f53ff30fa2782b63de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2nvm4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:49Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-p45pn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:18Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:18 crc kubenswrapper[5002]: I0930 12:21:18.474077 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:18 crc kubenswrapper[5002]: I0930 12:21:18.474130 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:18 crc kubenswrapper[5002]: I0930 12:21:18.474188 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:18 crc kubenswrapper[5002]: I0930 12:21:18.474221 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:18 crc kubenswrapper[5002]: I0930 12:21:18.474239 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:18Z","lastTransitionTime":"2025-09-30T12:21:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:18 crc kubenswrapper[5002]: I0930 12:21:18.480508 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-2lqq2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7b7df259-4156-484c-bedd-543ca42f2970\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3e10fed923eacb0c5409835ec689ec74b4ef68b770774b3fe03e05db05d63c04\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6865c45580d6dc6acaf6dede9483213728b45f602a6df3a6d9264bbec520db66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6865c45580d6dc6acaf6dede9483213728b45f602a6df3a6d9264bbec520db66\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://042d2758
098735539d73ee03bcaf5bfaaa765ef2337975341c63511bff33e4ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://042d2758098735539d73ee03bcaf5bfaaa765ef2337975341c63511bff33e4ec\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://037e6837e485e2662c487cea7b28bd28b85c3aa2dc698edd3d16b4269907dd83\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://037e6837e485e2662c487cea7b28bd28b85c3aa2dc698edd3d16b4269907dd83\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2a451f8cf7baf508ad6ceda7f19f3117804e5698e678bedf8e4187f60847aaac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2a451f8cf7baf508ad6ceda7f19f3117804e5698e678bedf8e4187f60847aaac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/e
ntrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://edf6916be3680c65376e28fb893a869599eb07d870ac5337a38fed83b4dbf28d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://edf6916be3680c65376e28fb893a869599eb07d870ac5337a38fed83b4dbf28d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7019df241ab90e98201c474287f02919d99bca60c432a8f61ac68d29ce53bed8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7019df241ab90e98201c474287f02919d99bca60c432a8f61ac68d29ce53bed8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-2lqq2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:18Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:18 crc kubenswrapper[5002]: I0930 12:21:18.495044 5002 
status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-dj2ln" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"68756e8a-d882-403f-acd7-2c41fce4446f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jdgr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jdgr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:21:04Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-dj2ln\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:18Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:18 crc kubenswrapper[5002]: I0930 12:21:18.576867 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:18 crc kubenswrapper[5002]: I0930 12:21:18.577103 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 
12:21:18 crc kubenswrapper[5002]: I0930 12:21:18.577238 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:18 crc kubenswrapper[5002]: I0930 12:21:18.577337 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:18 crc kubenswrapper[5002]: I0930 12:21:18.577430 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:18Z","lastTransitionTime":"2025-09-30T12:21:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:18 crc kubenswrapper[5002]: I0930 12:21:18.675214 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 12:21:18 crc kubenswrapper[5002]: I0930 12:21:18.675326 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 12:21:18 crc kubenswrapper[5002]: I0930 12:21:18.675390 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 12:21:18 crc kubenswrapper[5002]: E0930 12:21:18.675615 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 12:21:18 crc kubenswrapper[5002]: E0930 12:21:18.675714 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 12:21:18 crc kubenswrapper[5002]: E0930 12:21:18.675809 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 12:21:18 crc kubenswrapper[5002]: I0930 12:21:18.680003 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:18 crc kubenswrapper[5002]: I0930 12:21:18.680059 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:18 crc kubenswrapper[5002]: I0930 12:21:18.680083 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:18 crc kubenswrapper[5002]: I0930 12:21:18.680129 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:18 crc kubenswrapper[5002]: I0930 12:21:18.680153 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:18Z","lastTransitionTime":"2025-09-30T12:21:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:18 crc kubenswrapper[5002]: I0930 12:21:18.751805 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 12:21:18 crc kubenswrapper[5002]: E0930 12:21:18.751990 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 12:21:50.751953393 +0000 UTC m=+85.001635589 (durationBeforeRetry 32s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 12:21:18 crc kubenswrapper[5002]: I0930 12:21:18.752062 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 12:21:18 crc kubenswrapper[5002]: I0930 12:21:18.752170 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 12:21:18 crc kubenswrapper[5002]: I0930 12:21:18.752243 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 12:21:18 crc kubenswrapper[5002]: I0930 12:21:18.752303 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 12:21:18 crc kubenswrapper[5002]: E0930 12:21:18.752367 5002 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Sep 30 12:21:18 crc kubenswrapper[5002]: E0930 12:21:18.752412 5002 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Sep 30 12:21:18 crc kubenswrapper[5002]: E0930 12:21:18.752436 5002 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 30 12:21:18 crc kubenswrapper[5002]: E0930 12:21:18.752451 5002 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Sep 30 12:21:18 crc kubenswrapper[5002]: E0930 12:21:18.752461 5002 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Sep 30 
12:21:18 crc kubenswrapper[5002]: E0930 12:21:18.752532 5002 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Sep 30 12:21:18 crc kubenswrapper[5002]: E0930 12:21:18.752568 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-09-30 12:21:50.752544149 +0000 UTC m=+85.002226335 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 30 12:21:18 crc kubenswrapper[5002]: E0930 12:21:18.752615 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-30 12:21:50.75259844 +0000 UTC m=+85.002280616 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Sep 30 12:21:18 crc kubenswrapper[5002]: E0930 12:21:18.752645 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-30 12:21:50.752628961 +0000 UTC m=+85.002311137 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Sep 30 12:21:18 crc kubenswrapper[5002]: E0930 12:21:18.752574 5002 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Sep 30 12:21:18 crc kubenswrapper[5002]: E0930 12:21:18.752673 5002 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 30 12:21:18 crc kubenswrapper[5002]: E0930 12:21:18.752741 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-09-30 12:21:50.752729564 +0000 UTC m=+85.002411750 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 30 12:21:18 crc kubenswrapper[5002]: I0930 12:21:18.783502 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:18 crc kubenswrapper[5002]: I0930 12:21:18.783548 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:18 crc kubenswrapper[5002]: I0930 12:21:18.783560 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:18 crc kubenswrapper[5002]: I0930 12:21:18.783578 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:18 crc kubenswrapper[5002]: I0930 12:21:18.783591 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:18Z","lastTransitionTime":"2025-09-30T12:21:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:18 crc kubenswrapper[5002]: I0930 12:21:18.885528 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:18 crc kubenswrapper[5002]: I0930 12:21:18.885698 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:18 crc kubenswrapper[5002]: I0930 12:21:18.885745 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:18 crc kubenswrapper[5002]: I0930 12:21:18.885771 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:18 crc kubenswrapper[5002]: I0930 12:21:18.885785 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:18Z","lastTransitionTime":"2025-09-30T12:21:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 12:21:18 crc kubenswrapper[5002]: I0930 12:21:18.988795 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:18 crc kubenswrapper[5002]: I0930 12:21:18.988834 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:18 crc kubenswrapper[5002]: I0930 12:21:18.988844 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:18 crc kubenswrapper[5002]: I0930 12:21:18.988884 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:18 crc kubenswrapper[5002]: I0930 12:21:18.988914 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:18Z","lastTransitionTime":"2025-09-30T12:21:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:19 crc kubenswrapper[5002]: I0930 12:21:19.091431 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:19 crc kubenswrapper[5002]: I0930 12:21:19.091464 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:19 crc kubenswrapper[5002]: I0930 12:21:19.091514 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:19 crc kubenswrapper[5002]: I0930 12:21:19.091529 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:19 crc kubenswrapper[5002]: I0930 12:21:19.091539 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:19Z","lastTransitionTime":"2025-09-30T12:21:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 12:21:19 crc kubenswrapper[5002]: I0930 12:21:19.179235 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-4pvsr_7095aa7a-d067-4977-bdc5-3a45a52a6a39/ovnkube-controller/2.log" Sep 30 12:21:19 crc kubenswrapper[5002]: I0930 12:21:19.183909 5002 scope.go:117] "RemoveContainer" containerID="4e8af62b8de35a0c80cb0fafe3fc81375703e08d812d830cc738ee9bf7c102ee" Sep 30 12:21:19 crc kubenswrapper[5002]: E0930 12:21:19.184212 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-4pvsr_openshift-ovn-kubernetes(7095aa7a-d067-4977-bdc5-3a45a52a6a39)\"" pod="openshift-ovn-kubernetes/ovnkube-node-4pvsr" podUID="7095aa7a-d067-4977-bdc5-3a45a52a6a39" Sep 30 12:21:19 crc kubenswrapper[5002]: I0930 12:21:19.195712 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:19 crc kubenswrapper[5002]: I0930 12:21:19.195768 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:19 crc kubenswrapper[5002]: I0930 12:21:19.195785 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:19 crc kubenswrapper[5002]: I0930 12:21:19.195809 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:19 crc kubenswrapper[5002]: I0930 12:21:19.195825 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:19Z","lastTransitionTime":"2025-09-30T12:21:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 12:21:19 crc kubenswrapper[5002]: I0930 12:21:19.204460 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://30f55f9a0bcab13420179c62f63cd5e785afe964ac86286450fcd91d7ca0562e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://306dd7840789c5fe2565d819a744a57e330cb1fcc1ab1ca4b3c5ebcfd0361d3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:19Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:19 crc kubenswrapper[5002]: I0930 12:21:19.222357 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:19Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:19 crc kubenswrapper[5002]: I0930 12:21:19.235452 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-ttfn8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2bd9f18d-bfb3-4bd7-9a87-242029cd3200\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0809a587f4b73c4a5e632e8db2cc0d4a957eded6da9a771cf0b53e4862de54e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lvf2n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-ttfn8\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:19Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:19 crc kubenswrapper[5002]: I0930 12:21:19.252459 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"341a55c6-78d3-4fa2-8f47-b56fd41fa1c1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7bc40360d8d17f580c7ded1e33762aa443998e4202892e2f2effc08b6659a4a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbwzb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6868404a7219f8dbbc4e7541e5f80402c169808daea81b5f94546472cb8e70a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbwzb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:50Z\\\"}}\" for pod 
\"openshift-machine-config-operator\"/\"machine-config-daemon-ncbb5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:19Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:19 crc kubenswrapper[5002]: I0930 12:21:19.269624 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4pvsr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7095aa7a-d067-4977-bdc5-3a45a52a6a39\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b95267743f525eb5c8712304a1dc7afe4403b2065c0a3b594d95dcba6348170\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://89dbc048c1483376e11a6754384e9ea154767a88f7dfb3463809a2554bb142b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\
\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d41b04f928f3b4423bf4aa1babfd6292b749752d31ac57771b36bed503312d3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ceb946c94ca247c05c57001754a73122b1bb98a98d886df9d64b2e4a003a4cf6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfbe7b62a4f0a00896ac4608d70c4ef6dc7675e0312f85ca181fcf1087fe73e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-acce
ss-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a65c3c1af3f1ee07c2f5350011d1f5eb61dfe0b21ff13d7326373d65ffe603c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4e8af62b8de35a0c80cb0fafe3fc81375703e08d812d830cc738ee9bf7c102ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4e8af62b8de35a0c80cb0fafe3fc81375703e08d812d830cc738ee9bf7c102ee\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T12:21:17Z\\\",\\\"message\\\":\\\"rc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:17Z is after 2025-08-24T17:21:41Z]\\\\nI0930 12:21:17.550120 6732 transact.go:42] Configuring OVN: [{Op:update Table:Logical_Switch_Port Row:map[addresses:{GoSet:[0a:58:0a:d9:00:5c 10.217.0.92]} options:{GoMap:map[iface-id-ver:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 requested-chassis:crc]} port_security:{GoSet:[0a:58:0a:d9:00:5c 10.217.0.92]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {c94130be-172c-477c-88c4-40cc7eba30fe}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:} {Op:mutate Table:Logical_Switch Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:c94130be-172c-477c-88c4-40cc7eba30fe}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {7e8bb06a-06a5-45bc-a752-26a17d322811}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:} {Op:mutate Table:Port_Group Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert 
Value:{GoSet:[{GoUUID:c94130be-172c-477c-88c4-40cc7eba30fe}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {eb8eef51-1a8d-43f9-ae2e-3b2cc00ded\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T12:21:16Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-4pvsr_openshift-ovn-kubernetes(7095aa7a-d067-4977-bdc5-3a45a52a6a39)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ce6227a7ac3ced1fbd4814039859e25ff52f314aa8479275f1bfc49b04cf411\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\
\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29ad21e0858ff157cc785f60f949fb08082938850870cc2c5198447f1e1d9253\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://29ad21e0858ff157cc785f60f949fb08082938850870cc2c5198447f1e1d9253\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:50Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4pvsr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:19Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:19 crc kubenswrapper[5002]: I0930 12:21:19.279846 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-dj2ln" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"68756e8a-d882-403f-acd7-2c41fce4446f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jdgr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jdgr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:21:04Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-dj2ln\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:19Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:19 crc kubenswrapper[5002]: I0930 12:21:19.290878 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"98317299-6ce9-4196-a387-becb81461d6a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6057b9d238b4d4dd1329f0ffdbfd88f2f8869c06e5eb681041229a15dd4a08dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://96f9b5d0a287ad67a6f1008d06b0d371a522722280353ed9001f81198b9295ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd0dba52ee6bddc02187198fe6c409abf3d336df93b558b27fc7f76e7ec8f2b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://84c3302e7daef1722f84b63d532dabe8e9f8cf5679478df9b92b73273b3a3000\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://84c3302e7daef1722f84b63d532dabe8e9f8cf5679478df9b92b73273b3a3000\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:27Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:26Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:19Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:19 crc kubenswrapper[5002]: I0930 12:21:19.298050 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:19 crc kubenswrapper[5002]: I0930 12:21:19.298085 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:19 crc kubenswrapper[5002]: I0930 12:21:19.298095 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:19 crc kubenswrapper[5002]: I0930 12:21:19.298110 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:19 crc kubenswrapper[5002]: I0930 12:21:19.298119 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:19Z","lastTransitionTime":"2025-09-30T12:21:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 12:21:19 crc kubenswrapper[5002]: I0930 12:21:19.302032 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:19Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:19 crc kubenswrapper[5002]: I0930 12:21:19.309715 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-p45pn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d0334cb8-cf38-4e50-80e8-2b81d8d46ae7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca035599120eed38b0abc1893076e4d1ecab6b1ce53605f53ff30fa2782b63de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2nvm4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:49Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-p45pn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:19Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:19 crc kubenswrapper[5002]: I0930 12:21:19.322577 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-2lqq2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7b7df259-4156-484c-bedd-543ca42f2970\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3e10fed923eacb0c5409835ec689ec74b4ef68b770774b3fe03e05db05d63c04\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6865c45580d6dc6acaf6dede9483213728b45f602a6df3a6d9264bbec520db66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6865c45580d6dc6acaf6dede9483213728b45f602a6df3a6d9264bbec520db66\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://042d2758098735539d73ee03bcaf5bfaaa765ef2337975341c63511bff33e4ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://042d2758098735539d73ee03bcaf5bfaaa765ef2337975341c63511bff33e4ec\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://037e6837e485e2662c487cea7b28bd28b85c3aa2dc698edd3d16b4269907dd83\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://037e6837e485e2662c487cea7b28bd28b85c3aa2dc698edd3d16b4269907dd83\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2a451f8cf7baf508ad6ceda7f19f3117804e5698e678bedf8e4187f60847aaac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2a451f8cf7baf508ad6ceda7f19f3117804e5698e678bedf8e4187f60847aaac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://edf6916be3680c65376e28fb893a869599eb07d870ac5337a38fed83b4dbf28d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://edf6916be3680c65376e28fb893a869599eb07d870ac5337a38fed83b4dbf28d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7019df241ab90e98201c474287f02919d99bca60c432a8f61ac68d29ce53bed8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7019df241ab90e98201c474287f02919d99bca60c432a8f61ac68d29ce53bed8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-2lqq2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:19Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:19 crc kubenswrapper[5002]: I0930 12:21:19.334662 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hvvbg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c3b6a7b0-1c44-43ee-a789-36bb0cd51b87\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cb569d449d2d8d49ee8d62c4815b625481cd5ddd882a135dac7465fe63fbc67e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:21:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ffrqq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4537e7bf982dba689bd81821609884ac5f2530e6efa7d48cd65e05ff64d2ec6b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:21:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ffrqq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:21:02Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-hvvbg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:19Z is after 2025-08-24T17:21:41Z" Sep 30 
12:21:19 crc kubenswrapper[5002]: I0930 12:21:19.353042 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1d4eefd-8292-45a1-af4b-4a4906cccd2f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21a70de7ac636795f52b3bdd2a2b938494cabcf80e1fb9b321a309f4719edf4e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://240fb1d0e11ae7b3459121ba32094ddfa79dc71a1180befebcaf594859a63d86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://91de7b5c3b52d03b05c151b66857b5af5f3b9d228f3f1af32a7f504225e1e739\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"lo
g-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4cb1aca4e313653c2d83c0d4fb8dfab1e6f8c8220c806cbcdcf154f2d9ab6e29\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ef59c1ec617a61effd91da12779f13866a50fe611d078330f0275e8d6ef1c27e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3a814062d9554255b768b26ad452946c64836707ac0e68c9e3f107dc587c0ccb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3a814062d9554255b768b26ad452946c64836707ac0e68c9e3f107dc587c0ccb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://74db2c969d2f9049fd8e8130a9fd50b312f3fce049ab0e814390dbff3b0cb596\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://74db2c969d2f9049fd8e8130a9fd50b312f3fce049ab0e814390dbff3b0cb596\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"reas
on\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://a367821c9b4b8799fb789ca39d487c519925da11ebb80e94a0123e02cf78964e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a367821c9b4b8799fb789ca39d487c519925da11ebb80e94a0123e02cf78964e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:26Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:19Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:19 crc kubenswrapper[5002]: I0930 12:21:19.369921 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1fe2fccc-790d-41b7-a322-0f99dbd9b3e2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a46e01897629261ad42fc3fa2cc7fdf56a2a020864a1088d9e58edf61aac296e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://017ebbf00a59d33f36e3515dd280bc0c4363da8b3e621339185ebdb3c8728148\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ee0dc371a43f3780e1178e29ac491c7a61786fccb37527dd2f21d800adbbdae\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ba33674850f0b121ea405e37924fcc085b3a3625e87432d9df38b2420920bf45\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://49c139b2c0ae558d108d47a8452c2a8348fc164761d468e1b9b98ff2c7407f20\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"message\\\":\\\"le observer\\\\nW0930 12:20:46.369838 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0930 12:20:46.370037 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 12:20:46.371113 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1995022772/tls.crt::/tmp/serving-cert-1995022772/tls.key\\\\\\\"\\\\nI0930 12:20:46.549991 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0930 12:20:46.558658 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0930 12:20:46.558692 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0930 12:20:46.558723 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0930 12:20:46.558735 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0930 12:20:46.572632 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0930 12:20:46.572689 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 12:20:46.572707 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 12:20:46.572718 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0930 12:20:46.572725 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0930 12:20:46.572737 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0930 12:20:46.572744 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0930 12:20:46.573045 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0930 12:20:46.574462 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:40Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:21:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b99e73bb34b117cdb12e2475fc8e7d0ffdcb7fc44a000282e7776473eca209ea\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://24b1bc36a80f3925846e76f49c3347f1e2f14555096b3a514b06c6eb2e8fe02c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://24b1bc36a80f3925846e76f49c3347f1e2f14555096b3a514b06c6eb2e8fe02c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:19Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:19 crc kubenswrapper[5002]: I0930 12:21:19.383362 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://28d9568103a5257388af7a54e676246ab6ee732abd4516bd06e2cd7471f0ec08\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:19Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:19 crc kubenswrapper[5002]: I0930 12:21:19.396099 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97f2c4d225c4e4f2f04337915618c52ced7280fe45dc735fd9b1aca03f95f7f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:19Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:19 crc kubenswrapper[5002]: I0930 12:21:19.399846 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:19 crc kubenswrapper[5002]: I0930 12:21:19.399888 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:19 crc kubenswrapper[5002]: I0930 12:21:19.399899 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:19 crc kubenswrapper[5002]: I0930 12:21:19.399914 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:19 crc kubenswrapper[5002]: I0930 12:21:19.399924 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:19Z","lastTransitionTime":"2025-09-30T12:21:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 12:21:19 crc kubenswrapper[5002]: I0930 12:21:19.409431 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:19Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:19 crc kubenswrapper[5002]: I0930 12:21:19.421729 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"90d5183f-90e1-421f-bd64-fd7f05605586\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d2e0f074c0f31a5432cea24faa4f95c276f59cdf48bca8ceecd065b4cdff5d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://902cc950dc462d86c9de62cac5d04e99aa3240952f43e8a5f07884e70ff84b56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://047c48f1e175cddc7dc981a8d9027783f4a90c21cb444050771de50fb02e7a9e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f45ac789dc45e907d68fe54ec7bc2ea72dbcc5f46cd55c113c014203d5e709d9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:19Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:19 crc kubenswrapper[5002]: I0930 12:21:19.433371 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-ls6n9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2ebe21b2-eb85-4b30-b911-78f6619d07f9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e83e79da4b7b8f89a61048c7583c9ff7ccb7ac910744dfbb572c189c5676741\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-frsmm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase
\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:52Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-ls6n9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:19Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:19 crc kubenswrapper[5002]: I0930 12:21:19.503569 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:19 crc kubenswrapper[5002]: I0930 12:21:19.503601 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:19 crc kubenswrapper[5002]: I0930 12:21:19.503609 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:19 crc kubenswrapper[5002]: I0930 12:21:19.503625 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:19 crc kubenswrapper[5002]: I0930 12:21:19.503633 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:19Z","lastTransitionTime":"2025-09-30T12:21:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:19 crc kubenswrapper[5002]: I0930 12:21:19.606168 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:19 crc kubenswrapper[5002]: I0930 12:21:19.606215 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:19 crc kubenswrapper[5002]: I0930 12:21:19.606225 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:19 crc kubenswrapper[5002]: I0930 12:21:19.606238 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:19 crc kubenswrapper[5002]: I0930 12:21:19.606250 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:19Z","lastTransitionTime":"2025-09-30T12:21:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:19 crc kubenswrapper[5002]: I0930 12:21:19.675163 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-dj2ln" Sep 30 12:21:19 crc kubenswrapper[5002]: E0930 12:21:19.675278 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-dj2ln" podUID="68756e8a-d882-403f-acd7-2c41fce4446f" Sep 30 12:21:19 crc kubenswrapper[5002]: I0930 12:21:19.709946 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:19 crc kubenswrapper[5002]: I0930 12:21:19.710003 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:19 crc kubenswrapper[5002]: I0930 12:21:19.710017 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:19 crc kubenswrapper[5002]: I0930 12:21:19.710040 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:19 crc kubenswrapper[5002]: I0930 12:21:19.710057 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:19Z","lastTransitionTime":"2025-09-30T12:21:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:19 crc kubenswrapper[5002]: I0930 12:21:19.812994 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:19 crc kubenswrapper[5002]: I0930 12:21:19.813071 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:19 crc kubenswrapper[5002]: I0930 12:21:19.813091 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:19 crc kubenswrapper[5002]: I0930 12:21:19.813119 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:19 crc kubenswrapper[5002]: I0930 12:21:19.813142 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:19Z","lastTransitionTime":"2025-09-30T12:21:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 12:21:19 crc kubenswrapper[5002]: I0930 12:21:19.915794 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:19 crc kubenswrapper[5002]: I0930 12:21:19.915866 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:19 crc kubenswrapper[5002]: I0930 12:21:19.915890 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:19 crc kubenswrapper[5002]: I0930 12:21:19.915920 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:19 crc kubenswrapper[5002]: I0930 12:21:19.915942 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:19Z","lastTransitionTime":"2025-09-30T12:21:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:20 crc kubenswrapper[5002]: I0930 12:21:20.019024 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:20 crc kubenswrapper[5002]: I0930 12:21:20.019082 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:20 crc kubenswrapper[5002]: I0930 12:21:20.019100 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:20 crc kubenswrapper[5002]: I0930 12:21:20.019124 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:20 crc kubenswrapper[5002]: I0930 12:21:20.019141 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:20Z","lastTransitionTime":"2025-09-30T12:21:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:20 crc kubenswrapper[5002]: I0930 12:21:20.122364 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:20 crc kubenswrapper[5002]: I0930 12:21:20.122792 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:20 crc kubenswrapper[5002]: I0930 12:21:20.123014 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:20 crc kubenswrapper[5002]: I0930 12:21:20.123208 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:20 crc kubenswrapper[5002]: I0930 12:21:20.123403 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:20Z","lastTransitionTime":"2025-09-30T12:21:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 12:21:20 crc kubenswrapper[5002]: I0930 12:21:20.226816 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:20 crc kubenswrapper[5002]: I0930 12:21:20.226878 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:20 crc kubenswrapper[5002]: I0930 12:21:20.226894 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:20 crc kubenswrapper[5002]: I0930 12:21:20.226914 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:20 crc kubenswrapper[5002]: I0930 12:21:20.226925 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:20Z","lastTransitionTime":"2025-09-30T12:21:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:20 crc kubenswrapper[5002]: I0930 12:21:20.269213 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/68756e8a-d882-403f-acd7-2c41fce4446f-metrics-certs\") pod \"network-metrics-daemon-dj2ln\" (UID: \"68756e8a-d882-403f-acd7-2c41fce4446f\") " pod="openshift-multus/network-metrics-daemon-dj2ln" Sep 30 12:21:20 crc kubenswrapper[5002]: E0930 12:21:20.269427 5002 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Sep 30 12:21:20 crc kubenswrapper[5002]: E0930 12:21:20.269552 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/68756e8a-d882-403f-acd7-2c41fce4446f-metrics-certs podName:68756e8a-d882-403f-acd7-2c41fce4446f nodeName:}" failed. No retries permitted until 2025-09-30 12:21:36.269527184 +0000 UTC m=+70.519209370 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/68756e8a-d882-403f-acd7-2c41fce4446f-metrics-certs") pod "network-metrics-daemon-dj2ln" (UID: "68756e8a-d882-403f-acd7-2c41fce4446f") : object "openshift-multus"/"metrics-daemon-secret" not registered Sep 30 12:21:20 crc kubenswrapper[5002]: I0930 12:21:20.329827 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:20 crc kubenswrapper[5002]: I0930 12:21:20.329896 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:20 crc kubenswrapper[5002]: I0930 12:21:20.329918 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:20 crc kubenswrapper[5002]: I0930 12:21:20.329942 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:20 crc kubenswrapper[5002]: I0930 12:21:20.329958 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:20Z","lastTransitionTime":"2025-09-30T12:21:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:20 crc kubenswrapper[5002]: I0930 12:21:20.433229 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:20 crc kubenswrapper[5002]: I0930 12:21:20.433696 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:20 crc kubenswrapper[5002]: I0930 12:21:20.433873 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:20 crc kubenswrapper[5002]: I0930 12:21:20.434040 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:20 crc kubenswrapper[5002]: I0930 12:21:20.434180 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:20Z","lastTransitionTime":"2025-09-30T12:21:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 12:21:20 crc kubenswrapper[5002]: I0930 12:21:20.543136 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:20 crc kubenswrapper[5002]: I0930 12:21:20.543250 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:20 crc kubenswrapper[5002]: I0930 12:21:20.543273 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:20 crc kubenswrapper[5002]: I0930 12:21:20.543300 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:20 crc kubenswrapper[5002]: I0930 12:21:20.543317 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:20Z","lastTransitionTime":"2025-09-30T12:21:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:20 crc kubenswrapper[5002]: I0930 12:21:20.646143 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:20 crc kubenswrapper[5002]: I0930 12:21:20.646221 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:20 crc kubenswrapper[5002]: I0930 12:21:20.646240 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:20 crc kubenswrapper[5002]: I0930 12:21:20.646266 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:20 crc kubenswrapper[5002]: I0930 12:21:20.646283 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:20Z","lastTransitionTime":"2025-09-30T12:21:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:20 crc kubenswrapper[5002]: I0930 12:21:20.675840 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 12:21:20 crc kubenswrapper[5002]: I0930 12:21:20.676019 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 12:21:20 crc kubenswrapper[5002]: E0930 12:21:20.676207 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 12:21:20 crc kubenswrapper[5002]: I0930 12:21:20.676282 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 12:21:20 crc kubenswrapper[5002]: E0930 12:21:20.676522 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 12:21:20 crc kubenswrapper[5002]: E0930 12:21:20.676649 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 12:21:20 crc kubenswrapper[5002]: I0930 12:21:20.749269 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:20 crc kubenswrapper[5002]: I0930 12:21:20.749325 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:20 crc kubenswrapper[5002]: I0930 12:21:20.749346 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:20 crc kubenswrapper[5002]: I0930 12:21:20.749375 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:20 crc kubenswrapper[5002]: I0930 12:21:20.749392 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:20Z","lastTransitionTime":"2025-09-30T12:21:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:20 crc kubenswrapper[5002]: I0930 12:21:20.852622 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:20 crc kubenswrapper[5002]: I0930 12:21:20.852682 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:20 crc kubenswrapper[5002]: I0930 12:21:20.852699 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:20 crc kubenswrapper[5002]: I0930 12:21:20.852723 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:20 crc kubenswrapper[5002]: I0930 12:21:20.852740 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:20Z","lastTransitionTime":"2025-09-30T12:21:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 12:21:20 crc kubenswrapper[5002]: I0930 12:21:20.956033 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:20 crc kubenswrapper[5002]: I0930 12:21:20.956120 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:20 crc kubenswrapper[5002]: I0930 12:21:20.956140 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:20 crc kubenswrapper[5002]: I0930 12:21:20.956167 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:20 crc kubenswrapper[5002]: I0930 12:21:20.956186 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:20Z","lastTransitionTime":"2025-09-30T12:21:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:21 crc kubenswrapper[5002]: I0930 12:21:21.059149 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:21 crc kubenswrapper[5002]: I0930 12:21:21.059196 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:21 crc kubenswrapper[5002]: I0930 12:21:21.059208 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:21 crc kubenswrapper[5002]: I0930 12:21:21.059226 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:21 crc kubenswrapper[5002]: I0930 12:21:21.059237 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:21Z","lastTransitionTime":"2025-09-30T12:21:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:21 crc kubenswrapper[5002]: I0930 12:21:21.162981 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:21 crc kubenswrapper[5002]: I0930 12:21:21.163043 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:21 crc kubenswrapper[5002]: I0930 12:21:21.163065 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:21 crc kubenswrapper[5002]: I0930 12:21:21.163096 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:21 crc kubenswrapper[5002]: I0930 12:21:21.163118 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:21Z","lastTransitionTime":"2025-09-30T12:21:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 12:21:21 crc kubenswrapper[5002]: I0930 12:21:21.265389 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:21 crc kubenswrapper[5002]: I0930 12:21:21.265807 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:21 crc kubenswrapper[5002]: I0930 12:21:21.265927 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:21 crc kubenswrapper[5002]: I0930 12:21:21.266060 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:21 crc kubenswrapper[5002]: I0930 12:21:21.266342 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:21Z","lastTransitionTime":"2025-09-30T12:21:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:21 crc kubenswrapper[5002]: I0930 12:21:21.369781 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:21 crc kubenswrapper[5002]: I0930 12:21:21.369842 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:21 crc kubenswrapper[5002]: I0930 12:21:21.369863 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:21 crc kubenswrapper[5002]: I0930 12:21:21.369887 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:21 crc kubenswrapper[5002]: I0930 12:21:21.369905 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:21Z","lastTransitionTime":"2025-09-30T12:21:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:21 crc kubenswrapper[5002]: I0930 12:21:21.472091 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:21 crc kubenswrapper[5002]: I0930 12:21:21.472117 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:21 crc kubenswrapper[5002]: I0930 12:21:21.472124 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:21 crc kubenswrapper[5002]: I0930 12:21:21.472136 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:21 crc kubenswrapper[5002]: I0930 12:21:21.472144 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:21Z","lastTransitionTime":"2025-09-30T12:21:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 12:21:21 crc kubenswrapper[5002]: I0930 12:21:21.574825 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:21 crc kubenswrapper[5002]: I0930 12:21:21.575177 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:21 crc kubenswrapper[5002]: I0930 12:21:21.575367 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:21 crc kubenswrapper[5002]: I0930 12:21:21.575573 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:21 crc kubenswrapper[5002]: I0930 12:21:21.575738 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:21Z","lastTransitionTime":"2025-09-30T12:21:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:21 crc kubenswrapper[5002]: I0930 12:21:21.675335 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-dj2ln" Sep 30 12:21:21 crc kubenswrapper[5002]: E0930 12:21:21.675514 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-dj2ln" podUID="68756e8a-d882-403f-acd7-2c41fce4446f" Sep 30 12:21:21 crc kubenswrapper[5002]: I0930 12:21:21.677591 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:21 crc kubenswrapper[5002]: I0930 12:21:21.677615 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:21 crc kubenswrapper[5002]: I0930 12:21:21.677623 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:21 crc kubenswrapper[5002]: I0930 12:21:21.677634 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:21 crc kubenswrapper[5002]: I0930 12:21:21.677642 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:21Z","lastTransitionTime":"2025-09-30T12:21:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 12:21:21 crc kubenswrapper[5002]: I0930 12:21:21.780566 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:21 crc kubenswrapper[5002]: I0930 12:21:21.781003 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:21 crc kubenswrapper[5002]: I0930 12:21:21.781232 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:21 crc kubenswrapper[5002]: I0930 12:21:21.781433 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:21 crc kubenswrapper[5002]: I0930 12:21:21.781695 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:21Z","lastTransitionTime":"2025-09-30T12:21:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:21 crc kubenswrapper[5002]: I0930 12:21:21.885978 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:21 crc kubenswrapper[5002]: I0930 12:21:21.886760 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:21 crc kubenswrapper[5002]: I0930 12:21:21.886905 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:21 crc kubenswrapper[5002]: I0930 12:21:21.887099 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:21 crc kubenswrapper[5002]: I0930 12:21:21.887291 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:21Z","lastTransitionTime":"2025-09-30T12:21:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 12:21:21 crc kubenswrapper[5002]: I0930 12:21:21.905681 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 30 12:21:21 crc kubenswrapper[5002]: I0930 12:21:21.933786 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"90d5183f-90e1-421f-bd64-fd7f05605586\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d2e0f074c0f31a5432cea24faa4f95c276f59cdf48bca8ceecd065b4cdff5d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://902cc950dc462d86c9de62cac5d04e99aa3240952f43e8a5f07884e70ff84b56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://047c48f1e175cddc7dc981a8d9027783f4a90c21cb444050771de50fb02e7a9e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\
":{\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f45ac789dc45e907d68fe54ec7bc2ea72dbcc5f46cd55c113c014203d5e709d9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:21Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:21 crc kubenswrapper[5002]: I0930 12:21:21.949612 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-ls6n9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2ebe21b2-eb85-4b30-b911-78f6619d07f9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e83e79da4b7b8f89a61048c7583c9ff7ccb7ac910744dfbb572c189c5676741\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-frsmm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:52Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-ls6n9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:21Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:21 crc kubenswrapper[5002]: I0930 12:21:21.966614 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-ttfn8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2bd9f18d-bfb3-4bd7-9a87-242029cd3200\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0809a587f4b73c4a5e632e8db2cc0d4a957eded6da9a771cf0b53e4862de54e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lvf2n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-ttfn8\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:21Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:21 crc kubenswrapper[5002]: I0930 12:21:21.985694 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"341a55c6-78d3-4fa2-8f47-b56fd41fa1c1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7bc40360d8d17f580c7ded1e33762aa443998e4202892e2f2effc08b6659a4a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbwzb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6868404a7219f8dbbc4e7541e5f80402c169808daea81b5f94546472cb8e70a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbwzb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:50Z\\\"}}\" for pod 
\"openshift-machine-config-operator\"/\"machine-config-daemon-ncbb5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:21Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:21 crc kubenswrapper[5002]: I0930 12:21:21.993389 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:21 crc kubenswrapper[5002]: I0930 12:21:21.993633 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:21 crc kubenswrapper[5002]: I0930 12:21:21.993671 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:21 crc kubenswrapper[5002]: I0930 12:21:21.993703 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:21 crc kubenswrapper[5002]: I0930 12:21:21.993726 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:21Z","lastTransitionTime":"2025-09-30T12:21:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:22 crc kubenswrapper[5002]: I0930 12:21:22.011255 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4pvsr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7095aa7a-d067-4977-bdc5-3a45a52a6a39\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b95267743f525eb5c8712304a1dc7afe4403b2065c0a3b594d95dcba6348170\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://89dbc048c1483376e11a6754384e9ea154767a88f7dfb3463809a2554bb142b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d41b04f928f3b4423bf4aa1babfd6292b749752d31ac57771b36bed503312d3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ceb946c94ca247c05c57001754a73122b1bb98a98d886df9d64b2e4a003a4cf6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfbe7b62a4f0a00896ac4608d70c4ef6dc7675e0312f85ca181fcf1087fe73e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a65c3c1af3f1ee07c2f5350011d1f5eb61dfe0b21ff13d7326373d65ffe603c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4e8af62b8de35a0c80cb0fafe3fc81375703e08d
812d830cc738ee9bf7c102ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4e8af62b8de35a0c80cb0fafe3fc81375703e08d812d830cc738ee9bf7c102ee\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T12:21:17Z\\\",\\\"message\\\":\\\"rc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:17Z is after 2025-08-24T17:21:41Z]\\\\nI0930 12:21:17.550120 6732 transact.go:42] Configuring OVN: [{Op:update Table:Logical_Switch_Port Row:map[addresses:{GoSet:[0a:58:0a:d9:00:5c 10.217.0.92]} options:{GoMap:map[iface-id-ver:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 requested-chassis:crc]} port_security:{GoSet:[0a:58:0a:d9:00:5c 10.217.0.92]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {c94130be-172c-477c-88c4-40cc7eba30fe}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:} {Op:mutate Table:Logical_Switch Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:c94130be-172c-477c-88c4-40cc7eba30fe}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {7e8bb06a-06a5-45bc-a752-26a17d322811}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:} {Op:mutate Table:Port_Group Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:c94130be-172c-477c-88c4-40cc7eba30fe}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {eb8eef51-1a8d-43f9-ae2e-3b2cc00ded\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T12:21:16Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-4pvsr_openshift-ovn-kubernetes(7095aa7a-d067-4977-bdc5-3a45a52a6a39)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ce6227a7ac3ced1fbd4814039859e25ff52f314aa8479275f1bfc49b04cf411\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29ad21e0858ff157cc785f60f949fb08082938850870cc2c5198447f1e1d9253\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://29ad21e0858ff157cc785f60f949fb08082938850870cc2c5198447f1e1d9253\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:50Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4pvsr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:22Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:22 crc kubenswrapper[5002]: I0930 12:21:22.030574 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://30f55f9a0bcab13420179c62f63cd5e785afe964ac86286450fcd91d7ca0562e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://306dd7840789c5fe2565d819a744a57e330cb1fcc1ab1ca4b3c5ebcfd0361d3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17
b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:22Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:22 crc kubenswrapper[5002]: I0930 12:21:22.052960 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:22Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:22 crc kubenswrapper[5002]: I0930 12:21:22.073609 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:22Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:22 crc kubenswrapper[5002]: I0930 12:21:22.088927 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-p45pn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d0334cb8-cf38-4e50-80e8-2b81d8d46ae7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca035599120eed38b0abc1893076e4d1ecab6b1ce53605f53ff30fa2782b63de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2nvm4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:49Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-p45pn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-09-30T12:21:22Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:22 crc kubenswrapper[5002]: I0930 12:21:22.096364 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:22 crc kubenswrapper[5002]: I0930 12:21:22.096415 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:22 crc kubenswrapper[5002]: I0930 12:21:22.096428 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:22 crc kubenswrapper[5002]: I0930 12:21:22.096449 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:22 crc kubenswrapper[5002]: I0930 12:21:22.096464 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:22Z","lastTransitionTime":"2025-09-30T12:21:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:22 crc kubenswrapper[5002]: I0930 12:21:22.104295 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-2lqq2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7b7df259-4156-484c-bedd-543ca42f2970\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3e10fed923eacb0c5409835ec689ec74b4ef68b770774b3fe03e05db05d63c04\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6865c45580d6dc6acaf6dede9483213728b45f602a6df3a6d9264bbec520db66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f
8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6865c45580d6dc6acaf6dede9483213728b45f602a6df3a6d9264bbec520db66\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://042d2758098735539d73ee03bcaf5bfaaa765ef2337975341c63511bff33e4ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://042d2758098735539d73ee03bcaf5bfaaa765ef2337975341c63511bff33e4ec\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://037e6837e485e2662c487cea7b28bd28b85c3aa2dc698edd3d16b4269907dd83\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://037e6837e485e2662c487cea7b28bd28b85c3aa2dc698edd3d16b4269907dd83\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/e
tc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2a451f8cf7baf508ad6ceda7f19f3117804e5698e678bedf8e4187f60847aaac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2a451f8cf7baf508ad6ceda7f19f3117804e5698e678bedf8e4187f60847aaac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://edf6916be3680c65376e28fb893a869599eb07d870ac5337a38fed83b4dbf28d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://edf6916be3680c65376e28fb893a869599eb07d870ac5337a38fed83b4dbf28d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7019df241ab90e98201c474287f02919d99bca60c432a8f61ac68d29ce53bed8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7019df241ab90e98201c474287f02919d99bca60c432a8f61ac68d29ce53bed8\\\",\\\"exitC
ode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-2lqq2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:22Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:22 crc kubenswrapper[5002]: I0930 12:21:22.114887 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-dj2ln" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"68756e8a-d882-403f-acd7-2c41fce4446f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jdgr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jdgr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:21:04Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-dj2ln\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:22Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:22 crc kubenswrapper[5002]: I0930 12:21:22.127103 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"98317299-6ce9-4196-a387-becb81461d6a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6057b9d238b4d4dd1329f0ffdbfd88f2f8869c06e5eb681041229a15dd4a08dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://96f9b5d0a287ad67a6f1008d06b0d371a522722280353ed9001f81198b9295ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd0dba52ee6bddc02187198fe6c409abf3d336df93b558b27fc7f76e7ec8f2b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://84c3302e7daef1722f84b63d532dabe8e9f8cf5679478df9b92b73273b3a3000\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://84c3302e7daef1722f84b63d532dabe8e9f8cf5679478df9b92b73273b3a3000\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:27Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:26Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:22Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:22 crc kubenswrapper[5002]: I0930 12:21:22.140897 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://28d9568103a5257388af7a54e676246ab6ee732abd4516bd06e2cd7471f0ec08\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:22Z is after 
2025-08-24T17:21:41Z" Sep 30 12:21:22 crc kubenswrapper[5002]: I0930 12:21:22.155262 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97f2c4d225c4e4f2f04337915618c52ced7280fe45dc735fd9b1aca03f95f7f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:22Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:22 crc kubenswrapper[5002]: I0930 12:21:22.171575 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:22Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:22 crc kubenswrapper[5002]: I0930 12:21:22.184987 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hvvbg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c3b6a7b0-1c44-43ee-a789-36bb0cd51b87\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cb569d449d2d8d49ee8d62c4815b625481cd5ddd882a135dac7465fe63fbc67e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:21:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ffrqq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4537e7bf982dba689bd81821609884ac5f2530e6efa7d48cd65e05ff64d2ec6b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:21:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ffrqq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:21:02Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-hvvbg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:22Z is after 2025-08-24T17:21:41Z" Sep 30 
12:21:22 crc kubenswrapper[5002]: I0930 12:21:22.198504 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 12:21:22 crc kubenswrapper[5002]: I0930 12:21:22.198537 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 12:21:22 crc kubenswrapper[5002]: I0930 12:21:22.198546 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 12:21:22 crc kubenswrapper[5002]: I0930 12:21:22.198561 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 12:21:22 crc kubenswrapper[5002]: I0930 12:21:22.198573 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:22Z","lastTransitionTime":"2025-09-30T12:21:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 12:21:22 crc kubenswrapper[5002]: I0930 12:21:22.209299 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1d4eefd-8292-45a1-af4b-4a4906cccd2f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21a70de7ac636795f52b3bdd2a2b938494cabcf80e1fb9b321a309f4719edf4e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://240fb1d0e11ae7b3459121ba32094ddfa79dc71a1180befebcaf594859a63d86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b9009
2272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://91de7b5c3b52d03b05c151b66857b5af5f3b9d228f3f1af32a7f504225e1e739\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4cb1aca4e313653c2d83c0d4fb8dfab1e6f8c8220c806cbcdcf154f2d9ab6e29\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ef59c1ec617a61effd91da12779f13866a50fe611d078330f0275e8d6ef1c27e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3a814062d9554255b768b26ad452946c64836707ac0e68c9e3f107dc587c0ccb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\
":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3a814062d9554255b768b26ad452946c64836707ac0e68c9e3f107dc587c0ccb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://74db2c969d2f9049fd8e8130a9fd50b312f3fce049ab0e814390dbff3b0cb596\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://74db2c969d2f9049fd8e8130a9fd50b312f3fce049ab0e814390dbff3b0cb596\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://a367821c9b4b8799fb789ca39d487c519925da11ebb80e94a0123e02cf78964e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a367821c9b4b8799fb789ca39d487c519925da11ebb80e94a0123e02cf78964e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:26Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:22Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:22 crc kubenswrapper[5002]: I0930 12:21:22.231733 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1fe2fccc-790d-41b7-a322-0f99dbd9b3e2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a46e01897629261ad42fc3fa2cc7fdf56a2a020864a1088d9e58edf61aac296e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://017ebbf00a59d33f36e3515dd280bc0c4363da8b3e621339185ebdb3c8728148\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ee0dc371a43f3780e1178e29ac491c7a61786fccb37527dd2f21d800adbbdae\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ba33674850f0b121ea405e37924fcc085b3a3625e87432d9df38b2420920bf45\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://49c139b2c0ae558d108d47a8452c2a8348fc164761d468e1b9b98ff2c7407f20\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"message\\\":\\\"le observer\\\\nW0930 12:20:46.369838 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0930 12:20:46.370037 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 12:20:46.371113 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1995022772/tls.crt::/tmp/serving-cert-1995022772/tls.key\\\\\\\"\\\\nI0930 12:20:46.549991 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0930 12:20:46.558658 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0930 12:20:46.558692 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0930 12:20:46.558723 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0930 12:20:46.558735 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0930 12:20:46.572632 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0930 12:20:46.572689 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 12:20:46.572707 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 12:20:46.572718 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0930 12:20:46.572725 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0930 12:20:46.572737 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0930 12:20:46.572744 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0930 12:20:46.573045 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0930 12:20:46.574462 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:40Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:21:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b99e73bb34b117cdb12e2475fc8e7d0ffdcb7fc44a000282e7776473eca209ea\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://24b1bc36a80f3925846e76f49c3347f1e2f14555096b3a514b06c6eb2e8fe02c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://24b1bc36a80f3925846e76f49c3347f1e2f14555096b3a514b06c6eb2e8fe02c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:22Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:22 crc kubenswrapper[5002]: I0930 12:21:22.301367 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:22 crc kubenswrapper[5002]: I0930 12:21:22.301513 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:22 crc kubenswrapper[5002]: I0930 12:21:22.301531 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:22 crc kubenswrapper[5002]: I0930 12:21:22.301556 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:22 crc kubenswrapper[5002]: I0930 12:21:22.301601 5002 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:22Z","lastTransitionTime":"2025-09-30T12:21:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:22 crc kubenswrapper[5002]: I0930 12:21:22.405149 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:22 crc kubenswrapper[5002]: I0930 12:21:22.405252 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:22 crc kubenswrapper[5002]: I0930 12:21:22.405276 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:22 crc kubenswrapper[5002]: I0930 12:21:22.405304 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:22 crc kubenswrapper[5002]: I0930 12:21:22.405325 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:22Z","lastTransitionTime":"2025-09-30T12:21:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:22 crc kubenswrapper[5002]: I0930 12:21:22.508619 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:22 crc kubenswrapper[5002]: I0930 12:21:22.508691 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:22 crc kubenswrapper[5002]: I0930 12:21:22.508703 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:22 crc kubenswrapper[5002]: I0930 12:21:22.508718 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:22 crc kubenswrapper[5002]: I0930 12:21:22.508727 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:22Z","lastTransitionTime":"2025-09-30T12:21:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 12:21:22 crc kubenswrapper[5002]: I0930 12:21:22.612209 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:22 crc kubenswrapper[5002]: I0930 12:21:22.612305 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:22 crc kubenswrapper[5002]: I0930 12:21:22.612335 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:22 crc kubenswrapper[5002]: I0930 12:21:22.612369 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:22 crc kubenswrapper[5002]: I0930 12:21:22.612394 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:22Z","lastTransitionTime":"2025-09-30T12:21:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:22 crc kubenswrapper[5002]: I0930 12:21:22.676245 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 12:21:22 crc kubenswrapper[5002]: I0930 12:21:22.676313 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 12:21:22 crc kubenswrapper[5002]: E0930 12:21:22.676440 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 12:21:22 crc kubenswrapper[5002]: I0930 12:21:22.676507 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 12:21:22 crc kubenswrapper[5002]: E0930 12:21:22.676777 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 12:21:22 crc kubenswrapper[5002]: E0930 12:21:22.677083 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 12:21:22 crc kubenswrapper[5002]: I0930 12:21:22.715539 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:22 crc kubenswrapper[5002]: I0930 12:21:22.715607 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:22 crc kubenswrapper[5002]: I0930 12:21:22.715619 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:22 crc kubenswrapper[5002]: I0930 12:21:22.715634 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:22 crc kubenswrapper[5002]: I0930 12:21:22.715644 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:22Z","lastTransitionTime":"2025-09-30T12:21:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:22 crc kubenswrapper[5002]: I0930 12:21:22.818201 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:22 crc kubenswrapper[5002]: I0930 12:21:22.818273 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:22 crc kubenswrapper[5002]: I0930 12:21:22.818297 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:22 crc kubenswrapper[5002]: I0930 12:21:22.818326 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:22 crc kubenswrapper[5002]: I0930 12:21:22.818348 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:22Z","lastTransitionTime":"2025-09-30T12:21:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 12:21:22 crc kubenswrapper[5002]: I0930 12:21:22.921445 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:22 crc kubenswrapper[5002]: I0930 12:21:22.921562 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:22 crc kubenswrapper[5002]: I0930 12:21:22.921587 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:22 crc kubenswrapper[5002]: I0930 12:21:22.921621 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:22 crc kubenswrapper[5002]: I0930 12:21:22.921644 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:22Z","lastTransitionTime":"2025-09-30T12:21:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:23 crc kubenswrapper[5002]: I0930 12:21:23.025122 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:23 crc kubenswrapper[5002]: I0930 12:21:23.025173 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:23 crc kubenswrapper[5002]: I0930 12:21:23.025190 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:23 crc kubenswrapper[5002]: I0930 12:21:23.025216 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:23 crc kubenswrapper[5002]: I0930 12:21:23.025233 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:23Z","lastTransitionTime":"2025-09-30T12:21:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:23 crc kubenswrapper[5002]: I0930 12:21:23.127701 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:23 crc kubenswrapper[5002]: I0930 12:21:23.127769 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:23 crc kubenswrapper[5002]: I0930 12:21:23.127793 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:23 crc kubenswrapper[5002]: I0930 12:21:23.127821 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:23 crc kubenswrapper[5002]: I0930 12:21:23.127844 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:23Z","lastTransitionTime":"2025-09-30T12:21:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 12:21:23 crc kubenswrapper[5002]: I0930 12:21:23.231714 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:23 crc kubenswrapper[5002]: I0930 12:21:23.231786 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:23 crc kubenswrapper[5002]: I0930 12:21:23.231809 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:23 crc kubenswrapper[5002]: I0930 12:21:23.231836 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:23 crc kubenswrapper[5002]: I0930 12:21:23.231857 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:23Z","lastTransitionTime":"2025-09-30T12:21:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:23 crc kubenswrapper[5002]: I0930 12:21:23.335699 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:23 crc kubenswrapper[5002]: I0930 12:21:23.335770 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:23 crc kubenswrapper[5002]: I0930 12:21:23.335787 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:23 crc kubenswrapper[5002]: I0930 12:21:23.335812 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:23 crc kubenswrapper[5002]: I0930 12:21:23.335830 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:23Z","lastTransitionTime":"2025-09-30T12:21:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:23 crc kubenswrapper[5002]: I0930 12:21:23.437877 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:23 crc kubenswrapper[5002]: I0930 12:21:23.437940 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:23 crc kubenswrapper[5002]: I0930 12:21:23.437950 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:23 crc kubenswrapper[5002]: I0930 12:21:23.437964 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:23 crc kubenswrapper[5002]: I0930 12:21:23.437973 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:23Z","lastTransitionTime":"2025-09-30T12:21:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 12:21:23 crc kubenswrapper[5002]: I0930 12:21:23.441625 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:23 crc kubenswrapper[5002]: I0930 12:21:23.441668 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:23 crc kubenswrapper[5002]: I0930 12:21:23.441677 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:23 crc kubenswrapper[5002]: I0930 12:21:23.441692 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:23 crc kubenswrapper[5002]: I0930 12:21:23.441701 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:23Z","lastTransitionTime":"2025-09-30T12:21:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:23 crc kubenswrapper[5002]: E0930 12:21:23.454046 5002 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T12:21:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:23Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T12:21:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:23Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T12:21:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:23Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T12:21:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:23Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"2268e3e5-9437-4733-80ec-6085372d1c27\\\",\\\"systemUUID\\\":\\\"2730bf9d-d559-45ca-96f1-192133954467\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:23Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:23 crc kubenswrapper[5002]: I0930 12:21:23.457615 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:23 crc kubenswrapper[5002]: I0930 12:21:23.457653 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 30 12:21:23 crc kubenswrapper[5002]: I0930 12:21:23.457663 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:23 crc kubenswrapper[5002]: I0930 12:21:23.457678 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:23 crc kubenswrapper[5002]: I0930 12:21:23.457687 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:23Z","lastTransitionTime":"2025-09-30T12:21:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:23 crc kubenswrapper[5002]: E0930 12:21:23.468209 5002 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T12:21:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:23Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T12:21:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:23Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T12:21:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:23Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T12:21:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:23Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"2268e3e5-9437-4733-80ec-6085372d1c27\\\",\\\"systemUUID\\\":\\\"2730bf9d-d559-45ca-96f1-192133954467\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:23Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:23 crc kubenswrapper[5002]: I0930 12:21:23.472340 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:23 crc kubenswrapper[5002]: I0930 12:21:23.472373 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 30 12:21:23 crc kubenswrapper[5002]: I0930 12:21:23.472385 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:23 crc kubenswrapper[5002]: I0930 12:21:23.472399 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:23 crc kubenswrapper[5002]: I0930 12:21:23.472408 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:23Z","lastTransitionTime":"2025-09-30T12:21:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:23 crc kubenswrapper[5002]: E0930 12:21:23.484236 5002 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T12:21:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:23Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T12:21:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:23Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T12:21:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:23Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T12:21:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:23Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"2268e3e5-9437-4733-80ec-6085372d1c27\\\",\\\"systemUUID\\\":\\\"2730bf9d-d559-45ca-96f1-192133954467\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:23Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:23 crc kubenswrapper[5002]: I0930 12:21:23.488039 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:23 crc kubenswrapper[5002]: I0930 12:21:23.488089 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 30 12:21:23 crc kubenswrapper[5002]: I0930 12:21:23.488100 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:23 crc kubenswrapper[5002]: I0930 12:21:23.488119 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:23 crc kubenswrapper[5002]: I0930 12:21:23.488132 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:23Z","lastTransitionTime":"2025-09-30T12:21:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:23 crc kubenswrapper[5002]: E0930 12:21:23.508622 5002 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T12:21:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:23Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T12:21:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:23Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T12:21:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:23Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T12:21:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:23Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"2268e3e5-9437-4733-80ec-6085372d1c27\\\",\\\"systemUUID\\\":\\\"2730bf9d-d559-45ca-96f1-192133954467\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:23Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:23 crc kubenswrapper[5002]: I0930 12:21:23.512632 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:23 crc kubenswrapper[5002]: I0930 12:21:23.512670 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 30 12:21:23 crc kubenswrapper[5002]: I0930 12:21:23.512678 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:23 crc kubenswrapper[5002]: I0930 12:21:23.512691 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:23 crc kubenswrapper[5002]: I0930 12:21:23.512703 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:23Z","lastTransitionTime":"2025-09-30T12:21:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:23 crc kubenswrapper[5002]: E0930 12:21:23.524239 5002 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T12:21:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:23Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T12:21:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:23Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T12:21:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:23Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T12:21:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:23Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"2268e3e5-9437-4733-80ec-6085372d1c27\\\",\\\"systemUUID\\\":\\\"2730bf9d-d559-45ca-96f1-192133954467\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:23Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:23 crc kubenswrapper[5002]: E0930 12:21:23.524408 5002 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Sep 30 12:21:23 crc kubenswrapper[5002]: I0930 12:21:23.541519 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Sep 30 12:21:23 crc kubenswrapper[5002]: I0930 12:21:23.541579 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:23 crc kubenswrapper[5002]: I0930 12:21:23.541597 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:23 crc kubenswrapper[5002]: I0930 12:21:23.541622 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:23 crc kubenswrapper[5002]: I0930 12:21:23.541639 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:23Z","lastTransitionTime":"2025-09-30T12:21:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:23 crc kubenswrapper[5002]: I0930 12:21:23.644101 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:23 crc kubenswrapper[5002]: I0930 12:21:23.644147 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:23 crc kubenswrapper[5002]: I0930 12:21:23.644162 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:23 crc kubenswrapper[5002]: I0930 12:21:23.644178 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:23 crc kubenswrapper[5002]: I0930 12:21:23.644190 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:23Z","lastTransitionTime":"2025-09-30T12:21:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:23 crc kubenswrapper[5002]: I0930 12:21:23.675522 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-dj2ln" Sep 30 12:21:23 crc kubenswrapper[5002]: E0930 12:21:23.675706 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-dj2ln" podUID="68756e8a-d882-403f-acd7-2c41fce4446f" Sep 30 12:21:23 crc kubenswrapper[5002]: I0930 12:21:23.747413 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:23 crc kubenswrapper[5002]: I0930 12:21:23.747450 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:23 crc kubenswrapper[5002]: I0930 12:21:23.747458 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:23 crc kubenswrapper[5002]: I0930 12:21:23.747483 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:23 crc kubenswrapper[5002]: I0930 12:21:23.747493 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:23Z","lastTransitionTime":"2025-09-30T12:21:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:23 crc kubenswrapper[5002]: I0930 12:21:23.850229 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:23 crc kubenswrapper[5002]: I0930 12:21:23.850279 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:23 crc kubenswrapper[5002]: I0930 12:21:23.850288 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:23 crc kubenswrapper[5002]: I0930 12:21:23.850325 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:23 crc kubenswrapper[5002]: I0930 12:21:23.850334 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:23Z","lastTransitionTime":"2025-09-30T12:21:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 12:21:23 crc kubenswrapper[5002]: I0930 12:21:23.959195 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:23 crc kubenswrapper[5002]: I0930 12:21:23.959233 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:23 crc kubenswrapper[5002]: I0930 12:21:23.959243 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:23 crc kubenswrapper[5002]: I0930 12:21:23.959257 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:23 crc kubenswrapper[5002]: I0930 12:21:23.959267 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:23Z","lastTransitionTime":"2025-09-30T12:21:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:24 crc kubenswrapper[5002]: I0930 12:21:24.061778 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:24 crc kubenswrapper[5002]: I0930 12:21:24.061816 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:24 crc kubenswrapper[5002]: I0930 12:21:24.061823 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:24 crc kubenswrapper[5002]: I0930 12:21:24.061835 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:24 crc kubenswrapper[5002]: I0930 12:21:24.061844 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:24Z","lastTransitionTime":"2025-09-30T12:21:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:24 crc kubenswrapper[5002]: I0930 12:21:24.165124 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:24 crc kubenswrapper[5002]: I0930 12:21:24.165180 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:24 crc kubenswrapper[5002]: I0930 12:21:24.165189 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:24 crc kubenswrapper[5002]: I0930 12:21:24.165205 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:24 crc kubenswrapper[5002]: I0930 12:21:24.165214 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:24Z","lastTransitionTime":"2025-09-30T12:21:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 12:21:24 crc kubenswrapper[5002]: I0930 12:21:24.267969 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:24 crc kubenswrapper[5002]: I0930 12:21:24.268040 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:24 crc kubenswrapper[5002]: I0930 12:21:24.268067 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:24 crc kubenswrapper[5002]: I0930 12:21:24.268096 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:24 crc kubenswrapper[5002]: I0930 12:21:24.268114 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:24Z","lastTransitionTime":"2025-09-30T12:21:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:24 crc kubenswrapper[5002]: I0930 12:21:24.372058 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:24 crc kubenswrapper[5002]: I0930 12:21:24.372138 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:24 crc kubenswrapper[5002]: I0930 12:21:24.372172 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:24 crc kubenswrapper[5002]: I0930 12:21:24.372200 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:24 crc kubenswrapper[5002]: I0930 12:21:24.372220 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:24Z","lastTransitionTime":"2025-09-30T12:21:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:24 crc kubenswrapper[5002]: I0930 12:21:24.475638 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:24 crc kubenswrapper[5002]: I0930 12:21:24.475712 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:24 crc kubenswrapper[5002]: I0930 12:21:24.475732 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:24 crc kubenswrapper[5002]: I0930 12:21:24.475785 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:24 crc kubenswrapper[5002]: I0930 12:21:24.475804 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:24Z","lastTransitionTime":"2025-09-30T12:21:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 12:21:24 crc kubenswrapper[5002]: I0930 12:21:24.578393 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:24 crc kubenswrapper[5002]: I0930 12:21:24.578445 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:24 crc kubenswrapper[5002]: I0930 12:21:24.578457 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:24 crc kubenswrapper[5002]: I0930 12:21:24.578492 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:24 crc kubenswrapper[5002]: I0930 12:21:24.578504 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:24Z","lastTransitionTime":"2025-09-30T12:21:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:24 crc kubenswrapper[5002]: I0930 12:21:24.675871 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 12:21:24 crc kubenswrapper[5002]: I0930 12:21:24.675917 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 12:21:24 crc kubenswrapper[5002]: E0930 12:21:24.676129 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 12:21:24 crc kubenswrapper[5002]: I0930 12:21:24.676187 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 12:21:24 crc kubenswrapper[5002]: E0930 12:21:24.676307 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 12:21:24 crc kubenswrapper[5002]: E0930 12:21:24.676575 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 12:21:24 crc kubenswrapper[5002]: I0930 12:21:24.681309 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:24 crc kubenswrapper[5002]: I0930 12:21:24.681359 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:24 crc kubenswrapper[5002]: I0930 12:21:24.681378 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:24 crc kubenswrapper[5002]: I0930 12:21:24.681400 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:24 crc kubenswrapper[5002]: I0930 12:21:24.681416 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:24Z","lastTransitionTime":"2025-09-30T12:21:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:24 crc kubenswrapper[5002]: I0930 12:21:24.784747 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:24 crc kubenswrapper[5002]: I0930 12:21:24.784805 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:24 crc kubenswrapper[5002]: I0930 12:21:24.784823 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:24 crc kubenswrapper[5002]: I0930 12:21:24.784850 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:24 crc kubenswrapper[5002]: I0930 12:21:24.784869 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:24Z","lastTransitionTime":"2025-09-30T12:21:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 12:21:24 crc kubenswrapper[5002]: I0930 12:21:24.888032 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:24 crc kubenswrapper[5002]: I0930 12:21:24.888265 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:24 crc kubenswrapper[5002]: I0930 12:21:24.888415 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:24 crc kubenswrapper[5002]: I0930 12:21:24.888557 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:24 crc kubenswrapper[5002]: I0930 12:21:24.888672 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:24Z","lastTransitionTime":"2025-09-30T12:21:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:24 crc kubenswrapper[5002]: I0930 12:21:24.991182 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:24 crc kubenswrapper[5002]: I0930 12:21:24.991293 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:24 crc kubenswrapper[5002]: I0930 12:21:24.991307 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:24 crc kubenswrapper[5002]: I0930 12:21:24.991323 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:24 crc kubenswrapper[5002]: I0930 12:21:24.991336 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:24Z","lastTransitionTime":"2025-09-30T12:21:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:25 crc kubenswrapper[5002]: I0930 12:21:25.094852 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:25 crc kubenswrapper[5002]: I0930 12:21:25.094918 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:25 crc kubenswrapper[5002]: I0930 12:21:25.094942 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:25 crc kubenswrapper[5002]: I0930 12:21:25.094971 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:25 crc kubenswrapper[5002]: I0930 12:21:25.094991 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:25Z","lastTransitionTime":"2025-09-30T12:21:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 12:21:25 crc kubenswrapper[5002]: I0930 12:21:25.198563 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:25 crc kubenswrapper[5002]: I0930 12:21:25.198639 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:25 crc kubenswrapper[5002]: I0930 12:21:25.198652 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:25 crc kubenswrapper[5002]: I0930 12:21:25.198673 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:25 crc kubenswrapper[5002]: I0930 12:21:25.198690 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:25Z","lastTransitionTime":"2025-09-30T12:21:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:25 crc kubenswrapper[5002]: I0930 12:21:25.301576 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:25 crc kubenswrapper[5002]: I0930 12:21:25.301864 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:25 crc kubenswrapper[5002]: I0930 12:21:25.301985 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:25 crc kubenswrapper[5002]: I0930 12:21:25.302160 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:25 crc kubenswrapper[5002]: I0930 12:21:25.302256 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:25Z","lastTransitionTime":"2025-09-30T12:21:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:25 crc kubenswrapper[5002]: I0930 12:21:25.405339 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:25 crc kubenswrapper[5002]: I0930 12:21:25.405429 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:25 crc kubenswrapper[5002]: I0930 12:21:25.405456 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:25 crc kubenswrapper[5002]: I0930 12:21:25.405544 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:25 crc kubenswrapper[5002]: I0930 12:21:25.405572 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:25Z","lastTransitionTime":"2025-09-30T12:21:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 12:21:25 crc kubenswrapper[5002]: I0930 12:21:25.508848 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:25 crc kubenswrapper[5002]: I0930 12:21:25.508905 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:25 crc kubenswrapper[5002]: I0930 12:21:25.508924 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:25 crc kubenswrapper[5002]: I0930 12:21:25.508948 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:25 crc kubenswrapper[5002]: I0930 12:21:25.508964 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:25Z","lastTransitionTime":"2025-09-30T12:21:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:25 crc kubenswrapper[5002]: I0930 12:21:25.611452 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:25 crc kubenswrapper[5002]: I0930 12:21:25.611523 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:25 crc kubenswrapper[5002]: I0930 12:21:25.611543 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:25 crc kubenswrapper[5002]: I0930 12:21:25.611563 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:25 crc kubenswrapper[5002]: I0930 12:21:25.611576 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:25Z","lastTransitionTime":"2025-09-30T12:21:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:25 crc kubenswrapper[5002]: I0930 12:21:25.676072 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-dj2ln" Sep 30 12:21:25 crc kubenswrapper[5002]: E0930 12:21:25.676332 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-dj2ln" podUID="68756e8a-d882-403f-acd7-2c41fce4446f" Sep 30 12:21:25 crc kubenswrapper[5002]: I0930 12:21:25.714431 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:25 crc kubenswrapper[5002]: I0930 12:21:25.714510 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:25 crc kubenswrapper[5002]: I0930 12:21:25.714528 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:25 crc kubenswrapper[5002]: I0930 12:21:25.714549 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:25 crc kubenswrapper[5002]: I0930 12:21:25.714566 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:25Z","lastTransitionTime":"2025-09-30T12:21:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:25 crc kubenswrapper[5002]: I0930 12:21:25.817954 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:25 crc kubenswrapper[5002]: I0930 12:21:25.818013 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:25 crc kubenswrapper[5002]: I0930 12:21:25.818029 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:25 crc kubenswrapper[5002]: I0930 12:21:25.818052 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:25 crc kubenswrapper[5002]: I0930 12:21:25.818072 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:25Z","lastTransitionTime":"2025-09-30T12:21:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 12:21:25 crc kubenswrapper[5002]: I0930 12:21:25.920896 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:25 crc kubenswrapper[5002]: I0930 12:21:25.920960 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:25 crc kubenswrapper[5002]: I0930 12:21:25.920977 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:25 crc kubenswrapper[5002]: I0930 12:21:25.921003 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:25 crc kubenswrapper[5002]: I0930 12:21:25.921020 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:25Z","lastTransitionTime":"2025-09-30T12:21:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:26 crc kubenswrapper[5002]: I0930 12:21:26.024075 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:26 crc kubenswrapper[5002]: I0930 12:21:26.024140 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:26 crc kubenswrapper[5002]: I0930 12:21:26.024159 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:26 crc kubenswrapper[5002]: I0930 12:21:26.024187 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:26 crc kubenswrapper[5002]: I0930 12:21:26.024205 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:26Z","lastTransitionTime":"2025-09-30T12:21:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:26 crc kubenswrapper[5002]: I0930 12:21:26.127261 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:26 crc kubenswrapper[5002]: I0930 12:21:26.127322 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:26 crc kubenswrapper[5002]: I0930 12:21:26.127341 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:26 crc kubenswrapper[5002]: I0930 12:21:26.127364 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:26 crc kubenswrapper[5002]: I0930 12:21:26.127382 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:26Z","lastTransitionTime":"2025-09-30T12:21:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 12:21:26 crc kubenswrapper[5002]: I0930 12:21:26.230246 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:26 crc kubenswrapper[5002]: I0930 12:21:26.230301 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:26 crc kubenswrapper[5002]: I0930 12:21:26.230321 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:26 crc kubenswrapper[5002]: I0930 12:21:26.230404 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:26 crc kubenswrapper[5002]: I0930 12:21:26.230430 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:26Z","lastTransitionTime":"2025-09-30T12:21:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:26 crc kubenswrapper[5002]: I0930 12:21:26.334106 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:26 crc kubenswrapper[5002]: I0930 12:21:26.334181 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:26 crc kubenswrapper[5002]: I0930 12:21:26.334198 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:26 crc kubenswrapper[5002]: I0930 12:21:26.334737 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:26 crc kubenswrapper[5002]: I0930 12:21:26.334802 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:26Z","lastTransitionTime":"2025-09-30T12:21:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:26 crc kubenswrapper[5002]: I0930 12:21:26.438508 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:26 crc kubenswrapper[5002]: I0930 12:21:26.438855 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:26 crc kubenswrapper[5002]: I0930 12:21:26.439161 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:26 crc kubenswrapper[5002]: I0930 12:21:26.439323 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:26 crc kubenswrapper[5002]: I0930 12:21:26.439571 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:26Z","lastTransitionTime":"2025-09-30T12:21:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 12:21:26 crc kubenswrapper[5002]: I0930 12:21:26.542671 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:26 crc kubenswrapper[5002]: I0930 12:21:26.542728 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:26 crc kubenswrapper[5002]: I0930 12:21:26.542792 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:26 crc kubenswrapper[5002]: I0930 12:21:26.542841 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:26 crc kubenswrapper[5002]: I0930 12:21:26.542865 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:26Z","lastTransitionTime":"2025-09-30T12:21:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:26 crc kubenswrapper[5002]: I0930 12:21:26.646742 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:26 crc kubenswrapper[5002]: I0930 12:21:26.646799 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:26 crc kubenswrapper[5002]: I0930 12:21:26.646816 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:26 crc kubenswrapper[5002]: I0930 12:21:26.646841 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:26 crc kubenswrapper[5002]: I0930 12:21:26.646858 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:26Z","lastTransitionTime":"2025-09-30T12:21:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:26 crc kubenswrapper[5002]: I0930 12:21:26.675047 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 12:21:26 crc kubenswrapper[5002]: E0930 12:21:26.675376 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 12:21:26 crc kubenswrapper[5002]: I0930 12:21:26.675187 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 12:21:26 crc kubenswrapper[5002]: E0930 12:21:26.675896 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 12:21:26 crc kubenswrapper[5002]: I0930 12:21:26.675165 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 12:21:26 crc kubenswrapper[5002]: E0930 12:21:26.676271 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 12:21:26 crc kubenswrapper[5002]: I0930 12:21:26.691797 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hvvbg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c3b6a7b0-1c44-43ee-a789-36bb0cd51b87\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cb569d449d2d8d49ee8d62c4815b625481cd5ddd882a135dac7465fe63fbc67e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:21:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ffrqq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4537e7bf982dba689bd81821609884ac5f2530e6efa7d48cd65e05ff64d2ec6b\\\",\\\"image\\\":\\\"quay.io/openshift-release-de
v/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:21:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ffrqq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:21:02Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-hvvbg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:26Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:26 crc kubenswrapper[5002]: I0930 12:21:26.713849 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1d4eefd-8292-45a1-af4b-4a4906cccd2f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21a70de7ac636795f52b3bdd2a2b938494cabcf80e1fb9b321a309f4719edf4e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://240fb1d0e11ae7b3459121ba32094dd
fa79dc71a1180befebcaf594859a63d86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://91de7b5c3b52d03b05c151b66857b5af5f3b9d228f3f1af32a7f504225e1e739\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4cb1aca4e313653c2d83c0d4fb8dfab1e6f8c8220c806cbcdcf154f2d9ab6e29\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ef59c1ec617a61effd91da12779f13866a50fe611d078330f0275e8d6ef1c27e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3a814062d9554255b768b26ad452946c64836707ac0e68c9e3f107dc587c0ccb\\\",\\\"image\\\":\\\"quay.io
/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3a814062d9554255b768b26ad452946c64836707ac0e68c9e3f107dc587c0ccb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://74db2c969d2f9049fd8e8130a9fd50b312f3fce049ab0e814390dbff3b0cb596\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://74db2c969d2f9049fd8e8130a9fd50b312f3fce049ab0e814390dbff3b0cb596\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://a367821c9b4b8799fb789ca39d487c519925da11ebb80e94a0123e02cf78964e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a367821c9b4b8799fb789ca39d487c519925da11ebb80e94a0123e02cf78964e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:26Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:26Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:26 crc kubenswrapper[5002]: I0930 12:21:26.729326 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1fe2fccc-790d-41b7-a322-0f99dbd9b3e2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a46e01897629261ad42fc3fa2cc7fdf56a2a020864a1088d9e58edf61aac296e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://017ebbf00a59d33f36e3515dd280bc0c4363da8b3e621339185ebdb3c8728148\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ee0dc371a43f3780e1178e29ac491c7a61786fccb37527dd2f21d800adbbdae\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ba33674850f0b121ea405e37924fcc085b3a3625e87432d9df38b2420920bf45\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://49c139b2c0ae558d108d47a8452c2a8348fc164761d468e1b9b98ff2c7407f20\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"message\\\":\\\"le observer\\\\nW0930 12:20:46.369838 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0930 12:20:46.370037 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 12:20:46.371113 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1995022772/tls.crt::/tmp/serving-cert-1995022772/tls.key\\\\\\\"\\\\nI0930 12:20:46.549991 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0930 12:20:46.558658 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0930 12:20:46.558692 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0930 12:20:46.558723 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0930 12:20:46.558735 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0930 12:20:46.572632 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0930 12:20:46.572689 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 12:20:46.572707 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 12:20:46.572718 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0930 12:20:46.572725 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0930 12:20:46.572737 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0930 12:20:46.572744 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0930 12:20:46.573045 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0930 12:20:46.574462 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:40Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:21:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b99e73bb34b117cdb12e2475fc8e7d0ffdcb7fc44a000282e7776473eca209ea\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://24b1bc36a80f3925846e76f49c3347f1e2f14555096b3a514b06c6eb2e8fe02c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://24b1bc36a80f3925846e76f49c3347f1e2f14555096b3a514b06c6eb2e8fe02c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:26Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:26 crc kubenswrapper[5002]: I0930 12:21:26.746165 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://28d9568103a5257388af7a54e676246ab6ee732abd4516bd06e2cd7471f0ec08\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:26Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:26 crc kubenswrapper[5002]: I0930 12:21:26.752042 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:26 crc kubenswrapper[5002]: I0930 12:21:26.752106 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:26 crc kubenswrapper[5002]: I0930 12:21:26.752139 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:26 crc kubenswrapper[5002]: I0930 12:21:26.752187 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:26 crc kubenswrapper[5002]: I0930 12:21:26.752210 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:26Z","lastTransitionTime":"2025-09-30T12:21:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 12:21:26 crc kubenswrapper[5002]: I0930 12:21:26.760736 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97f2c4d225c4e4f2f04337915618c52ced7280fe45dc735fd9b1aca03f95f7f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:26Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:26 crc kubenswrapper[5002]: I0930 12:21:26.773802 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:26Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:26 crc kubenswrapper[5002]: I0930 12:21:26.788784 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"90d5183f-90e1-421f-bd64-fd7f05605586\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d2e0f074c0f31a5432cea24faa4f95c276f59cdf48bca8ceecd065b4cdff5d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://902cc950dc462d86c9de62cac5d04e99aa3240952f43e8a5f07884e70ff84b56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://047c48f1e175cddc7dc981a8d9027783f4a90c21cb444050771de50fb02e7a9e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f45ac789dc45e907d68fe54ec7bc2ea72dbcc5f46cd55c113c014203d5e709d9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:26Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:26 crc kubenswrapper[5002]: I0930 12:21:26.799674 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-ls6n9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2ebe21b2-eb85-4b30-b911-78f6619d07f9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e83e79da4b7b8f89a61048c7583c9ff7ccb7ac910744dfbb572c189c5676741\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-frsmm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase
\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:52Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-ls6n9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:26Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:26 crc kubenswrapper[5002]: I0930 12:21:26.814799 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://30f55f9a0bcab13420179c62f63cd5e785afe964ac86286450fcd91d7ca0562e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://306dd7840789c5fe2565d819a744a57e330cb1fcc1ab1ca4b3c5ebcfd0361d3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling 
webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:26Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:26 crc kubenswrapper[5002]: I0930 12:21:26.833784 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:26Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:26 crc kubenswrapper[5002]: I0930 12:21:26.852454 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-ttfn8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2bd9f18d-bfb3-4bd7-9a87-242029cd3200\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0809a587f4b73c4a5e632e8db2cc0d4a957eded6da9a771cf0b53e4862de54e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lvf2n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-ttfn8\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:26Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:26 crc kubenswrapper[5002]: I0930 12:21:26.854531 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:26 crc kubenswrapper[5002]: I0930 12:21:26.854559 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:26 crc kubenswrapper[5002]: I0930 12:21:26.854567 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:26 crc kubenswrapper[5002]: I0930 12:21:26.854580 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:26 crc kubenswrapper[5002]: I0930 12:21:26.854591 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:26Z","lastTransitionTime":"2025-09-30T12:21:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:26 crc kubenswrapper[5002]: I0930 12:21:26.867396 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"341a55c6-78d3-4fa2-8f47-b56fd41fa1c1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7bc40360d8d17f580c7ded1e33762aa443998e4202892e2f2effc08b6659a4a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbwzb\\\",\\\"readOnly\\\":true,\
\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6868404a7219f8dbbc4e7541e5f80402c169808daea81b5f94546472cb8e70a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbwzb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:50Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-ncbb5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:26Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:26 crc kubenswrapper[5002]: I0930 12:21:26.886919 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4pvsr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7095aa7a-d067-4977-bdc5-3a45a52a6a39\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b95267743f525eb5c8712304a1dc7afe4403b2065c0a3b594d95dcba6348170\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://89dbc048c1483376e11a6754384e9ea154767a88f7dfb3463809a2554bb142b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d41b04f928f3b4423bf4aa1babfd6292b749752d31ac57771b36bed503312d3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ceb946c94ca247c05c57001754a73122b1bb98a98d886df9d64b2e4a003a4cf6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfbe7b62a4f0a00896ac4608d70c4ef6dc7675e0312f85ca181fcf1087fe73e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a65c3c1af3f1ee07c2f5350011d1f5eb61dfe0b21ff13d7326373d65ffe603c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4e8af62b8de35a0c80cb0fafe3fc81375703e08d
812d830cc738ee9bf7c102ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4e8af62b8de35a0c80cb0fafe3fc81375703e08d812d830cc738ee9bf7c102ee\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T12:21:17Z\\\",\\\"message\\\":\\\"rc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:17Z is after 2025-08-24T17:21:41Z]\\\\nI0930 12:21:17.550120 6732 transact.go:42] Configuring OVN: [{Op:update Table:Logical_Switch_Port Row:map[addresses:{GoSet:[0a:58:0a:d9:00:5c 10.217.0.92]} options:{GoMap:map[iface-id-ver:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 requested-chassis:crc]} port_security:{GoSet:[0a:58:0a:d9:00:5c 10.217.0.92]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {c94130be-172c-477c-88c4-40cc7eba30fe}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:} {Op:mutate Table:Logical_Switch Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:c94130be-172c-477c-88c4-40cc7eba30fe}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {7e8bb06a-06a5-45bc-a752-26a17d322811}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:} {Op:mutate Table:Port_Group Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:c94130be-172c-477c-88c4-40cc7eba30fe}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {eb8eef51-1a8d-43f9-ae2e-3b2cc00ded\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T12:21:16Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-4pvsr_openshift-ovn-kubernetes(7095aa7a-d067-4977-bdc5-3a45a52a6a39)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ce6227a7ac3ced1fbd4814039859e25ff52f314aa8479275f1bfc49b04cf411\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29ad21e0858ff157cc785f60f949fb08082938850870cc2c5198447f1e1d9253\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://29ad21e0858ff157cc785f60f949fb08082938850870cc2c5198447f1e1d9253\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:50Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4pvsr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:26Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:26 crc kubenswrapper[5002]: I0930 12:21:26.897401 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-dj2ln" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"68756e8a-d882-403f-acd7-2c41fce4446f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jdgr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jdgr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:21:04Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-dj2ln\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:26Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:26 crc kubenswrapper[5002]: I0930 12:21:26.912332 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"98317299-6ce9-4196-a387-becb81461d6a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6057b9d238b4d4dd1329f0ffdbfd88f2f8869c06e5eb681041229a15dd4a08dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://96f9b5d0a287ad67a6f1008d06b0d371a522722280353ed9001f81198b9295ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd0dba52ee6bddc02187198fe6c409abf3d336df93b558b27fc7f76e7ec8f2b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://84c3302e7daef1722f84b63d532dabe8e9f8cf5679478df9b92b73273b3a3000\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://84c3302e7daef1722f84b63d532dabe8e9f8cf5679478df9b92b73273b3a3000\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:27Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:26Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:26Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:26 crc kubenswrapper[5002]: I0930 12:21:26.926037 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:26Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:26 crc kubenswrapper[5002]: I0930 12:21:26.937682 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-p45pn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d0334cb8-cf38-4e50-80e8-2b81d8d46ae7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca035599120eed38b0abc1893076e4d1ecab6b1ce53605f53ff30fa2782b63de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2nvm4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:49Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-p45pn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-09-30T12:21:26Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:26 crc kubenswrapper[5002]: I0930 12:21:26.960003 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:26 crc kubenswrapper[5002]: I0930 12:21:26.960086 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:26 crc kubenswrapper[5002]: I0930 12:21:26.960118 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:26 crc kubenswrapper[5002]: I0930 12:21:26.959885 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-2lqq2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7b7df259-4156-484c-bedd-543ca42f2970\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3e10fed923eacb0c5409835ec689ec74b4ef68b770774b3fe03e05db05d63c04\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6865c45580d6dc6acaf6dede9483213728b45f602a6df3a6d9264bbec520db66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6865c45580d6dc6acaf6dede9483213728b45f602a6df3a6d9264bbec520db66\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name
\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://042d2758098735539d73ee03bcaf5bfaaa765ef2337975341c63511bff33e4ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://042d2758098735539d73ee03bcaf5bfaaa765ef2337975341c63511bff33e4ec\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://037e6837e485e2662c487cea7b28bd28b85c3aa2dc698edd3d16b4269907dd83\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://037e6837e485e2662c487cea7b28bd28b85c3aa2dc698edd3d16b4269907dd83\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2a451f8cf7baf508ad6ceda7f19f3117804e5698e678bedf8e4187f60847aaac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0a
bdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2a451f8cf7baf508ad6ceda7f19f3117804e5698e678bedf8e4187f60847aaac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://edf6916be3680c65376e28fb893a869599eb07d870ac5337a38fed83b4dbf28d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://edf6916be3680c65376e28fb893a869599eb07d870ac5337a38fed83b4dbf28d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7019df241ab90e98201c474287f02919d99bca60c432a8f61ac68d29ce53bed8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7019df241ab90e98201c474287f02919d99bca60c432a8f61ac68d29ce53bed8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:50Z
\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-2lqq2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:26Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:26 crc kubenswrapper[5002]: I0930 12:21:26.960174 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:26 crc kubenswrapper[5002]: I0930 12:21:26.960231 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:26Z","lastTransitionTime":"2025-09-30T12:21:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:27 crc kubenswrapper[5002]: I0930 12:21:27.062364 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:27 crc kubenswrapper[5002]: I0930 12:21:27.062404 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:27 crc kubenswrapper[5002]: I0930 12:21:27.062413 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:27 crc kubenswrapper[5002]: I0930 12:21:27.062427 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:27 crc kubenswrapper[5002]: I0930 12:21:27.062437 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:27Z","lastTransitionTime":"2025-09-30T12:21:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:27 crc kubenswrapper[5002]: I0930 12:21:27.164859 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:27 crc kubenswrapper[5002]: I0930 12:21:27.165092 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:27 crc kubenswrapper[5002]: I0930 12:21:27.165167 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:27 crc kubenswrapper[5002]: I0930 12:21:27.165246 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:27 crc kubenswrapper[5002]: I0930 12:21:27.165308 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:27Z","lastTransitionTime":"2025-09-30T12:21:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 12:21:27 crc kubenswrapper[5002]: I0930 12:21:27.267088 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:27 crc kubenswrapper[5002]: I0930 12:21:27.267123 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:27 crc kubenswrapper[5002]: I0930 12:21:27.267132 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:27 crc kubenswrapper[5002]: I0930 12:21:27.267171 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:27 crc kubenswrapper[5002]: I0930 12:21:27.267184 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:27Z","lastTransitionTime":"2025-09-30T12:21:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:27 crc kubenswrapper[5002]: I0930 12:21:27.370275 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:27 crc kubenswrapper[5002]: I0930 12:21:27.370323 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:27 crc kubenswrapper[5002]: I0930 12:21:27.370339 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:27 crc kubenswrapper[5002]: I0930 12:21:27.370363 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:27 crc kubenswrapper[5002]: I0930 12:21:27.370379 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:27Z","lastTransitionTime":"2025-09-30T12:21:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:27 crc kubenswrapper[5002]: I0930 12:21:27.473394 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:27 crc kubenswrapper[5002]: I0930 12:21:27.473448 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:27 crc kubenswrapper[5002]: I0930 12:21:27.473464 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:27 crc kubenswrapper[5002]: I0930 12:21:27.473526 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:27 crc kubenswrapper[5002]: I0930 12:21:27.473545 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:27Z","lastTransitionTime":"2025-09-30T12:21:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 12:21:27 crc kubenswrapper[5002]: I0930 12:21:27.576070 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:27 crc kubenswrapper[5002]: I0930 12:21:27.576135 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:27 crc kubenswrapper[5002]: I0930 12:21:27.576149 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:27 crc kubenswrapper[5002]: I0930 12:21:27.576188 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:27 crc kubenswrapper[5002]: I0930 12:21:27.576200 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:27Z","lastTransitionTime":"2025-09-30T12:21:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:27 crc kubenswrapper[5002]: I0930 12:21:27.675768 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-dj2ln" Sep 30 12:21:27 crc kubenswrapper[5002]: E0930 12:21:27.675954 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-dj2ln" podUID="68756e8a-d882-403f-acd7-2c41fce4446f" Sep 30 12:21:27 crc kubenswrapper[5002]: I0930 12:21:27.678715 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:27 crc kubenswrapper[5002]: I0930 12:21:27.678772 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:27 crc kubenswrapper[5002]: I0930 12:21:27.678789 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:27 crc kubenswrapper[5002]: I0930 12:21:27.678810 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:27 crc kubenswrapper[5002]: I0930 12:21:27.678827 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:27Z","lastTransitionTime":"2025-09-30T12:21:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 12:21:27 crc kubenswrapper[5002]: I0930 12:21:27.782166 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:27 crc kubenswrapper[5002]: I0930 12:21:27.782582 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:27 crc kubenswrapper[5002]: I0930 12:21:27.782829 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:27 crc kubenswrapper[5002]: I0930 12:21:27.783022 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:27 crc kubenswrapper[5002]: I0930 12:21:27.783191 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:27Z","lastTransitionTime":"2025-09-30T12:21:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:27 crc kubenswrapper[5002]: I0930 12:21:27.887085 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:27 crc kubenswrapper[5002]: I0930 12:21:27.887154 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:27 crc kubenswrapper[5002]: I0930 12:21:27.887177 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:27 crc kubenswrapper[5002]: I0930 12:21:27.887205 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:27 crc kubenswrapper[5002]: I0930 12:21:27.887227 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:27Z","lastTransitionTime":"2025-09-30T12:21:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:27 crc kubenswrapper[5002]: I0930 12:21:27.989802 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:27 crc kubenswrapper[5002]: I0930 12:21:27.989857 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:27 crc kubenswrapper[5002]: I0930 12:21:27.989871 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:27 crc kubenswrapper[5002]: I0930 12:21:27.989897 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:27 crc kubenswrapper[5002]: I0930 12:21:27.989912 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:27Z","lastTransitionTime":"2025-09-30T12:21:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 12:21:28 crc kubenswrapper[5002]: I0930 12:21:28.093215 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:28 crc kubenswrapper[5002]: I0930 12:21:28.093276 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:28 crc kubenswrapper[5002]: I0930 12:21:28.093294 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:28 crc kubenswrapper[5002]: I0930 12:21:28.093317 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:28 crc kubenswrapper[5002]: I0930 12:21:28.093336 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:28Z","lastTransitionTime":"2025-09-30T12:21:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:28 crc kubenswrapper[5002]: I0930 12:21:28.196710 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:28 crc kubenswrapper[5002]: I0930 12:21:28.196782 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:28 crc kubenswrapper[5002]: I0930 12:21:28.196806 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:28 crc kubenswrapper[5002]: I0930 12:21:28.196836 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:28 crc kubenswrapper[5002]: I0930 12:21:28.196857 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:28Z","lastTransitionTime":"2025-09-30T12:21:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:28 crc kubenswrapper[5002]: I0930 12:21:28.300299 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:28 crc kubenswrapper[5002]: I0930 12:21:28.300347 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:28 crc kubenswrapper[5002]: I0930 12:21:28.300364 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:28 crc kubenswrapper[5002]: I0930 12:21:28.300386 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:28 crc kubenswrapper[5002]: I0930 12:21:28.300405 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:28Z","lastTransitionTime":"2025-09-30T12:21:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 12:21:28 crc kubenswrapper[5002]: I0930 12:21:28.403892 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:28 crc kubenswrapper[5002]: I0930 12:21:28.403947 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:28 crc kubenswrapper[5002]: I0930 12:21:28.403957 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:28 crc kubenswrapper[5002]: I0930 12:21:28.403971 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:28 crc kubenswrapper[5002]: I0930 12:21:28.403982 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:28Z","lastTransitionTime":"2025-09-30T12:21:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:28 crc kubenswrapper[5002]: I0930 12:21:28.507347 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:28 crc kubenswrapper[5002]: I0930 12:21:28.507419 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:28 crc kubenswrapper[5002]: I0930 12:21:28.507428 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:28 crc kubenswrapper[5002]: I0930 12:21:28.507449 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:28 crc kubenswrapper[5002]: I0930 12:21:28.507458 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:28Z","lastTransitionTime":"2025-09-30T12:21:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:28 crc kubenswrapper[5002]: I0930 12:21:28.611399 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:28 crc kubenswrapper[5002]: I0930 12:21:28.611452 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:28 crc kubenswrapper[5002]: I0930 12:21:28.611537 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:28 crc kubenswrapper[5002]: I0930 12:21:28.611571 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:28 crc kubenswrapper[5002]: I0930 12:21:28.611596 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:28Z","lastTransitionTime":"2025-09-30T12:21:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 12:21:28 crc kubenswrapper[5002]: I0930 12:21:28.675218 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 12:21:28 crc kubenswrapper[5002]: I0930 12:21:28.675257 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 12:21:28 crc kubenswrapper[5002]: I0930 12:21:28.675286 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 12:21:28 crc kubenswrapper[5002]: E0930 12:21:28.675400 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 12:21:28 crc kubenswrapper[5002]: E0930 12:21:28.675578 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 12:21:28 crc kubenswrapper[5002]: E0930 12:21:28.675716 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 12:21:28 crc kubenswrapper[5002]: I0930 12:21:28.713687 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:28 crc kubenswrapper[5002]: I0930 12:21:28.713749 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:28 crc kubenswrapper[5002]: I0930 12:21:28.713761 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:28 crc kubenswrapper[5002]: I0930 12:21:28.713778 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:28 crc kubenswrapper[5002]: I0930 12:21:28.713789 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:28Z","lastTransitionTime":"2025-09-30T12:21:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 12:21:28 crc kubenswrapper[5002]: I0930 12:21:28.816403 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:28 crc kubenswrapper[5002]: I0930 12:21:28.816763 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:28 crc kubenswrapper[5002]: I0930 12:21:28.816835 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:28 crc kubenswrapper[5002]: I0930 12:21:28.816929 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:28 crc kubenswrapper[5002]: I0930 12:21:28.816998 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:28Z","lastTransitionTime":"2025-09-30T12:21:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:28 crc kubenswrapper[5002]: I0930 12:21:28.920154 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:28 crc kubenswrapper[5002]: I0930 12:21:28.920216 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:28 crc kubenswrapper[5002]: I0930 12:21:28.920232 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:28 crc kubenswrapper[5002]: I0930 12:21:28.920256 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:28 crc kubenswrapper[5002]: I0930 12:21:28.920274 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:28Z","lastTransitionTime":"2025-09-30T12:21:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:29 crc kubenswrapper[5002]: I0930 12:21:29.022439 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:29 crc kubenswrapper[5002]: I0930 12:21:29.022670 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:29 crc kubenswrapper[5002]: I0930 12:21:29.022737 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:29 crc kubenswrapper[5002]: I0930 12:21:29.022811 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:29 crc kubenswrapper[5002]: I0930 12:21:29.022905 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:29Z","lastTransitionTime":"2025-09-30T12:21:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 12:21:29 crc kubenswrapper[5002]: I0930 12:21:29.125679 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:29 crc kubenswrapper[5002]: I0930 12:21:29.125738 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:29 crc kubenswrapper[5002]: I0930 12:21:29.125754 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:29 crc kubenswrapper[5002]: I0930 12:21:29.125776 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:29 crc kubenswrapper[5002]: I0930 12:21:29.125794 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:29Z","lastTransitionTime":"2025-09-30T12:21:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:29 crc kubenswrapper[5002]: I0930 12:21:29.229549 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:29 crc kubenswrapper[5002]: I0930 12:21:29.229593 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:29 crc kubenswrapper[5002]: I0930 12:21:29.229628 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:29 crc kubenswrapper[5002]: I0930 12:21:29.229662 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:29 crc kubenswrapper[5002]: I0930 12:21:29.229675 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:29Z","lastTransitionTime":"2025-09-30T12:21:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:29 crc kubenswrapper[5002]: I0930 12:21:29.332836 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:29 crc kubenswrapper[5002]: I0930 12:21:29.332893 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:29 crc kubenswrapper[5002]: I0930 12:21:29.332916 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:29 crc kubenswrapper[5002]: I0930 12:21:29.332941 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:29 crc kubenswrapper[5002]: I0930 12:21:29.332957 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:29Z","lastTransitionTime":"2025-09-30T12:21:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 12:21:29 crc kubenswrapper[5002]: I0930 12:21:29.435232 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:29 crc kubenswrapper[5002]: I0930 12:21:29.435269 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:29 crc kubenswrapper[5002]: I0930 12:21:29.435281 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:29 crc kubenswrapper[5002]: I0930 12:21:29.435301 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:29 crc kubenswrapper[5002]: I0930 12:21:29.435316 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:29Z","lastTransitionTime":"2025-09-30T12:21:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:29 crc kubenswrapper[5002]: I0930 12:21:29.537852 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:29 crc kubenswrapper[5002]: I0930 12:21:29.537917 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:29 crc kubenswrapper[5002]: I0930 12:21:29.537936 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:29 crc kubenswrapper[5002]: I0930 12:21:29.537960 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:29 crc kubenswrapper[5002]: I0930 12:21:29.537978 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:29Z","lastTransitionTime":"2025-09-30T12:21:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:29 crc kubenswrapper[5002]: I0930 12:21:29.641347 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:29 crc kubenswrapper[5002]: I0930 12:21:29.641714 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:29 crc kubenswrapper[5002]: I0930 12:21:29.641756 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:29 crc kubenswrapper[5002]: I0930 12:21:29.641787 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:29 crc kubenswrapper[5002]: I0930 12:21:29.641808 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:29Z","lastTransitionTime":"2025-09-30T12:21:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 12:21:29 crc kubenswrapper[5002]: I0930 12:21:29.675586 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-dj2ln" Sep 30 12:21:29 crc kubenswrapper[5002]: E0930 12:21:29.676253 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-dj2ln" podUID="68756e8a-d882-403f-acd7-2c41fce4446f" Sep 30 12:21:29 crc kubenswrapper[5002]: I0930 12:21:29.677072 5002 scope.go:117] "RemoveContainer" containerID="4e8af62b8de35a0c80cb0fafe3fc81375703e08d812d830cc738ee9bf7c102ee" Sep 30 12:21:29 crc kubenswrapper[5002]: E0930 12:21:29.677691 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-4pvsr_openshift-ovn-kubernetes(7095aa7a-d067-4977-bdc5-3a45a52a6a39)\"" pod="openshift-ovn-kubernetes/ovnkube-node-4pvsr" podUID="7095aa7a-d067-4977-bdc5-3a45a52a6a39" Sep 30 12:21:29 crc kubenswrapper[5002]: I0930 12:21:29.744977 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:29 crc kubenswrapper[5002]: I0930 12:21:29.745027 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:29 crc kubenswrapper[5002]: I0930 12:21:29.745044 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:29 crc kubenswrapper[5002]: I0930 12:21:29.745067 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:29 crc kubenswrapper[5002]: I0930 12:21:29.745084 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:29Z","lastTransitionTime":"2025-09-30T12:21:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 12:21:29 crc kubenswrapper[5002]: I0930 12:21:29.848925 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:29 crc kubenswrapper[5002]: I0930 12:21:29.848994 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:29 crc kubenswrapper[5002]: I0930 12:21:29.849018 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:29 crc kubenswrapper[5002]: I0930 12:21:29.849045 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:29 crc kubenswrapper[5002]: I0930 12:21:29.849066 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:29Z","lastTransitionTime":"2025-09-30T12:21:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:29 crc kubenswrapper[5002]: I0930 12:21:29.967337 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:29 crc kubenswrapper[5002]: I0930 12:21:29.967420 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:29 crc kubenswrapper[5002]: I0930 12:21:29.967438 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:29 crc kubenswrapper[5002]: I0930 12:21:29.967547 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:29 crc kubenswrapper[5002]: I0930 12:21:29.967580 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:29Z","lastTransitionTime":"2025-09-30T12:21:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:30 crc kubenswrapper[5002]: I0930 12:21:30.070734 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:30 crc kubenswrapper[5002]: I0930 12:21:30.070789 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:30 crc kubenswrapper[5002]: I0930 12:21:30.070806 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:30 crc kubenswrapper[5002]: I0930 12:21:30.070829 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:30 crc kubenswrapper[5002]: I0930 12:21:30.070846 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:30Z","lastTransitionTime":"2025-09-30T12:21:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 12:21:30 crc kubenswrapper[5002]: I0930 12:21:30.173104 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:30 crc kubenswrapper[5002]: I0930 12:21:30.173143 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:30 crc kubenswrapper[5002]: I0930 12:21:30.173189 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:30 crc kubenswrapper[5002]: I0930 12:21:30.173209 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:30 crc kubenswrapper[5002]: I0930 12:21:30.173220 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:30Z","lastTransitionTime":"2025-09-30T12:21:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:30 crc kubenswrapper[5002]: I0930 12:21:30.276743 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:30 crc kubenswrapper[5002]: I0930 12:21:30.277103 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:30 crc kubenswrapper[5002]: I0930 12:21:30.277241 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:30 crc kubenswrapper[5002]: I0930 12:21:30.277429 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:30 crc kubenswrapper[5002]: I0930 12:21:30.277583 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:30Z","lastTransitionTime":"2025-09-30T12:21:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:30 crc kubenswrapper[5002]: I0930 12:21:30.380637 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:30 crc kubenswrapper[5002]: I0930 12:21:30.380681 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:30 crc kubenswrapper[5002]: I0930 12:21:30.380690 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:30 crc kubenswrapper[5002]: I0930 12:21:30.380708 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:30 crc kubenswrapper[5002]: I0930 12:21:30.380718 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:30Z","lastTransitionTime":"2025-09-30T12:21:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 12:21:30 crc kubenswrapper[5002]: I0930 12:21:30.483450 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:30 crc kubenswrapper[5002]: I0930 12:21:30.483546 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:30 crc kubenswrapper[5002]: I0930 12:21:30.483556 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:30 crc kubenswrapper[5002]: I0930 12:21:30.483583 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:30 crc kubenswrapper[5002]: I0930 12:21:30.483596 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:30Z","lastTransitionTime":"2025-09-30T12:21:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:30 crc kubenswrapper[5002]: I0930 12:21:30.586348 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:30 crc kubenswrapper[5002]: I0930 12:21:30.586404 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:30 crc kubenswrapper[5002]: I0930 12:21:30.586421 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:30 crc kubenswrapper[5002]: I0930 12:21:30.586446 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:30 crc kubenswrapper[5002]: I0930 12:21:30.586462 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:30Z","lastTransitionTime":"2025-09-30T12:21:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:30 crc kubenswrapper[5002]: I0930 12:21:30.675314 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 12:21:30 crc kubenswrapper[5002]: E0930 12:21:30.675493 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 12:21:30 crc kubenswrapper[5002]: I0930 12:21:30.675607 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 12:21:30 crc kubenswrapper[5002]: E0930 12:21:30.675836 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 12:21:30 crc kubenswrapper[5002]: I0930 12:21:30.676211 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 12:21:30 crc kubenswrapper[5002]: E0930 12:21:30.676430 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 12:21:30 crc kubenswrapper[5002]: I0930 12:21:30.688953 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:30 crc kubenswrapper[5002]: I0930 12:21:30.688986 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:30 crc kubenswrapper[5002]: I0930 12:21:30.688995 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:30 crc kubenswrapper[5002]: I0930 12:21:30.689007 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:30 crc kubenswrapper[5002]: I0930 12:21:30.689016 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:30Z","lastTransitionTime":"2025-09-30T12:21:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 12:21:30 crc kubenswrapper[5002]: I0930 12:21:30.791281 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:30 crc kubenswrapper[5002]: I0930 12:21:30.791327 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:30 crc kubenswrapper[5002]: I0930 12:21:30.791338 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:30 crc kubenswrapper[5002]: I0930 12:21:30.791355 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:30 crc kubenswrapper[5002]: I0930 12:21:30.791368 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:30Z","lastTransitionTime":"2025-09-30T12:21:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:30 crc kubenswrapper[5002]: I0930 12:21:30.893419 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:30 crc kubenswrapper[5002]: I0930 12:21:30.893460 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:30 crc kubenswrapper[5002]: I0930 12:21:30.893517 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:30 crc kubenswrapper[5002]: I0930 12:21:30.893536 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:30 crc kubenswrapper[5002]: I0930 12:21:30.893546 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:30Z","lastTransitionTime":"2025-09-30T12:21:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:30 crc kubenswrapper[5002]: I0930 12:21:30.995993 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:30 crc kubenswrapper[5002]: I0930 12:21:30.996038 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:30 crc kubenswrapper[5002]: I0930 12:21:30.996051 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:30 crc kubenswrapper[5002]: I0930 12:21:30.996068 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:30 crc kubenswrapper[5002]: I0930 12:21:30.996080 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:30Z","lastTransitionTime":"2025-09-30T12:21:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 12:21:31 crc kubenswrapper[5002]: I0930 12:21:31.098695 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:31 crc kubenswrapper[5002]: I0930 12:21:31.098744 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:31 crc kubenswrapper[5002]: I0930 12:21:31.098756 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:31 crc kubenswrapper[5002]: I0930 12:21:31.098772 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:31 crc kubenswrapper[5002]: I0930 12:21:31.098785 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:31Z","lastTransitionTime":"2025-09-30T12:21:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:31 crc kubenswrapper[5002]: I0930 12:21:31.200962 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:31 crc kubenswrapper[5002]: I0930 12:21:31.201035 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:31 crc kubenswrapper[5002]: I0930 12:21:31.201055 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:31 crc kubenswrapper[5002]: I0930 12:21:31.201274 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:31 crc kubenswrapper[5002]: I0930 12:21:31.201297 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:31Z","lastTransitionTime":"2025-09-30T12:21:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:31 crc kubenswrapper[5002]: I0930 12:21:31.303903 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:31 crc kubenswrapper[5002]: I0930 12:21:31.303935 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:31 crc kubenswrapper[5002]: I0930 12:21:31.303943 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:31 crc kubenswrapper[5002]: I0930 12:21:31.303955 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:31 crc kubenswrapper[5002]: I0930 12:21:31.303963 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:31Z","lastTransitionTime":"2025-09-30T12:21:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 12:21:31 crc kubenswrapper[5002]: I0930 12:21:31.406270 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:31 crc kubenswrapper[5002]: I0930 12:21:31.406300 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:31 crc kubenswrapper[5002]: I0930 12:21:31.406308 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:31 crc kubenswrapper[5002]: I0930 12:21:31.406321 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:31 crc kubenswrapper[5002]: I0930 12:21:31.406329 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:31Z","lastTransitionTime":"2025-09-30T12:21:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:31 crc kubenswrapper[5002]: I0930 12:21:31.509402 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:31 crc kubenswrapper[5002]: I0930 12:21:31.509446 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:31 crc kubenswrapper[5002]: I0930 12:21:31.509458 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:31 crc kubenswrapper[5002]: I0930 12:21:31.509496 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:31 crc kubenswrapper[5002]: I0930 12:21:31.509509 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:31Z","lastTransitionTime":"2025-09-30T12:21:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:31 crc kubenswrapper[5002]: I0930 12:21:31.612075 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:31 crc kubenswrapper[5002]: I0930 12:21:31.612149 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:31 crc kubenswrapper[5002]: I0930 12:21:31.612179 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:31 crc kubenswrapper[5002]: I0930 12:21:31.612210 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:31 crc kubenswrapper[5002]: I0930 12:21:31.612232 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:31Z","lastTransitionTime":"2025-09-30T12:21:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 12:21:31 crc kubenswrapper[5002]: I0930 12:21:31.675847 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-dj2ln" Sep 30 12:21:31 crc kubenswrapper[5002]: E0930 12:21:31.676010 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-dj2ln" podUID="68756e8a-d882-403f-acd7-2c41fce4446f" Sep 30 12:21:31 crc kubenswrapper[5002]: I0930 12:21:31.714933 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:31 crc kubenswrapper[5002]: I0930 12:21:31.714983 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:31 crc kubenswrapper[5002]: I0930 12:21:31.714994 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:31 crc kubenswrapper[5002]: I0930 12:21:31.715010 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:31 crc kubenswrapper[5002]: I0930 12:21:31.715021 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:31Z","lastTransitionTime":"2025-09-30T12:21:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:31 crc kubenswrapper[5002]: I0930 12:21:31.817619 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:31 crc kubenswrapper[5002]: I0930 12:21:31.817670 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:31 crc kubenswrapper[5002]: I0930 12:21:31.817686 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:31 crc kubenswrapper[5002]: I0930 12:21:31.817711 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:31 crc kubenswrapper[5002]: I0930 12:21:31.817729 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:31Z","lastTransitionTime":"2025-09-30T12:21:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 12:21:31 crc kubenswrapper[5002]: I0930 12:21:31.919745 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:31 crc kubenswrapper[5002]: I0930 12:21:31.919808 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:31 crc kubenswrapper[5002]: I0930 12:21:31.919826 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:31 crc kubenswrapper[5002]: I0930 12:21:31.919852 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:31 crc kubenswrapper[5002]: I0930 12:21:31.919867 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:31Z","lastTransitionTime":"2025-09-30T12:21:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:32 crc kubenswrapper[5002]: I0930 12:21:32.022621 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:32 crc kubenswrapper[5002]: I0930 12:21:32.022697 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:32 crc kubenswrapper[5002]: I0930 12:21:32.022724 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:32 crc kubenswrapper[5002]: I0930 12:21:32.022754 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:32 crc kubenswrapper[5002]: I0930 12:21:32.022779 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:32Z","lastTransitionTime":"2025-09-30T12:21:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:32 crc kubenswrapper[5002]: I0930 12:21:32.125599 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:32 crc kubenswrapper[5002]: I0930 12:21:32.125653 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:32 crc kubenswrapper[5002]: I0930 12:21:32.125669 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:32 crc kubenswrapper[5002]: I0930 12:21:32.125693 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:32 crc kubenswrapper[5002]: I0930 12:21:32.125713 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:32Z","lastTransitionTime":"2025-09-30T12:21:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 12:21:32 crc kubenswrapper[5002]: I0930 12:21:32.227918 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:32 crc kubenswrapper[5002]: I0930 12:21:32.227986 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:32 crc kubenswrapper[5002]: I0930 12:21:32.228008 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:32 crc kubenswrapper[5002]: I0930 12:21:32.228036 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:32 crc kubenswrapper[5002]: I0930 12:21:32.228057 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:32Z","lastTransitionTime":"2025-09-30T12:21:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:32 crc kubenswrapper[5002]: I0930 12:21:32.331251 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:32 crc kubenswrapper[5002]: I0930 12:21:32.331302 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:32 crc kubenswrapper[5002]: I0930 12:21:32.331321 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:32 crc kubenswrapper[5002]: I0930 12:21:32.331345 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:32 crc kubenswrapper[5002]: I0930 12:21:32.331363 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:32Z","lastTransitionTime":"2025-09-30T12:21:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:32 crc kubenswrapper[5002]: I0930 12:21:32.434411 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:32 crc kubenswrapper[5002]: I0930 12:21:32.434516 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:32 crc kubenswrapper[5002]: I0930 12:21:32.434544 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:32 crc kubenswrapper[5002]: I0930 12:21:32.434571 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:32 crc kubenswrapper[5002]: I0930 12:21:32.434591 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:32Z","lastTransitionTime":"2025-09-30T12:21:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 12:21:32 crc kubenswrapper[5002]: I0930 12:21:32.536855 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:32 crc kubenswrapper[5002]: I0930 12:21:32.536906 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:32 crc kubenswrapper[5002]: I0930 12:21:32.536927 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:32 crc kubenswrapper[5002]: I0930 12:21:32.536951 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:32 crc kubenswrapper[5002]: I0930 12:21:32.536970 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:32Z","lastTransitionTime":"2025-09-30T12:21:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:32 crc kubenswrapper[5002]: I0930 12:21:32.639259 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:32 crc kubenswrapper[5002]: I0930 12:21:32.639294 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:32 crc kubenswrapper[5002]: I0930 12:21:32.639303 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:32 crc kubenswrapper[5002]: I0930 12:21:32.639317 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:32 crc kubenswrapper[5002]: I0930 12:21:32.639327 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:32Z","lastTransitionTime":"2025-09-30T12:21:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:32 crc kubenswrapper[5002]: I0930 12:21:32.675408 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 12:21:32 crc kubenswrapper[5002]: I0930 12:21:32.675564 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 12:21:32 crc kubenswrapper[5002]: I0930 12:21:32.675596 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 12:21:32 crc kubenswrapper[5002]: E0930 12:21:32.675763 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 12:21:32 crc kubenswrapper[5002]: E0930 12:21:32.675832 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 12:21:32 crc kubenswrapper[5002]: E0930 12:21:32.675966 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 12:21:32 crc kubenswrapper[5002]: I0930 12:21:32.741981 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:32 crc kubenswrapper[5002]: I0930 12:21:32.742033 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:32 crc kubenswrapper[5002]: I0930 12:21:32.742045 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:32 crc kubenswrapper[5002]: I0930 12:21:32.742064 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:32 crc kubenswrapper[5002]: I0930 12:21:32.742075 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:32Z","lastTransitionTime":"2025-09-30T12:21:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:32 crc kubenswrapper[5002]: I0930 12:21:32.845353 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:32 crc kubenswrapper[5002]: I0930 12:21:32.845947 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:32 crc kubenswrapper[5002]: I0930 12:21:32.846101 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:32 crc kubenswrapper[5002]: I0930 12:21:32.846221 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:32 crc kubenswrapper[5002]: I0930 12:21:32.846310 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:32Z","lastTransitionTime":"2025-09-30T12:21:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 12:21:32 crc kubenswrapper[5002]: I0930 12:21:32.948840 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:32 crc kubenswrapper[5002]: I0930 12:21:32.948904 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:32 crc kubenswrapper[5002]: I0930 12:21:32.948921 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:32 crc kubenswrapper[5002]: I0930 12:21:32.948945 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:32 crc kubenswrapper[5002]: I0930 12:21:32.948959 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:32Z","lastTransitionTime":"2025-09-30T12:21:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:33 crc kubenswrapper[5002]: I0930 12:21:33.051206 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:33 crc kubenswrapper[5002]: I0930 12:21:33.051278 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:33 crc kubenswrapper[5002]: I0930 12:21:33.051289 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:33 crc kubenswrapper[5002]: I0930 12:21:33.051308 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:33 crc kubenswrapper[5002]: I0930 12:21:33.051321 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:33Z","lastTransitionTime":"2025-09-30T12:21:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:33 crc kubenswrapper[5002]: I0930 12:21:33.153611 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:33 crc kubenswrapper[5002]: I0930 12:21:33.153656 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:33 crc kubenswrapper[5002]: I0930 12:21:33.153668 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:33 crc kubenswrapper[5002]: I0930 12:21:33.153686 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:33 crc kubenswrapper[5002]: I0930 12:21:33.153698 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:33Z","lastTransitionTime":"2025-09-30T12:21:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 12:21:33 crc kubenswrapper[5002]: I0930 12:21:33.255913 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:33 crc kubenswrapper[5002]: I0930 12:21:33.255977 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:33 crc kubenswrapper[5002]: I0930 12:21:33.255996 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:33 crc kubenswrapper[5002]: I0930 12:21:33.256020 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:33 crc kubenswrapper[5002]: I0930 12:21:33.256044 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:33Z","lastTransitionTime":"2025-09-30T12:21:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:33 crc kubenswrapper[5002]: I0930 12:21:33.358890 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:33 crc kubenswrapper[5002]: I0930 12:21:33.359160 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:33 crc kubenswrapper[5002]: I0930 12:21:33.359226 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:33 crc kubenswrapper[5002]: I0930 12:21:33.359309 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:33 crc kubenswrapper[5002]: I0930 12:21:33.359370 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:33Z","lastTransitionTime":"2025-09-30T12:21:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:33 crc kubenswrapper[5002]: I0930 12:21:33.462045 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:33 crc kubenswrapper[5002]: I0930 12:21:33.462836 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:33 crc kubenswrapper[5002]: I0930 12:21:33.463124 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:33 crc kubenswrapper[5002]: I0930 12:21:33.463230 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:33 crc kubenswrapper[5002]: I0930 12:21:33.463335 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:33Z","lastTransitionTime":"2025-09-30T12:21:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 12:21:33 crc kubenswrapper[5002]: I0930 12:21:33.566422 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:33 crc kubenswrapper[5002]: I0930 12:21:33.566499 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:33 crc kubenswrapper[5002]: I0930 12:21:33.566518 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:33 crc kubenswrapper[5002]: I0930 12:21:33.566541 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:33 crc kubenswrapper[5002]: I0930 12:21:33.566554 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:33Z","lastTransitionTime":"2025-09-30T12:21:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:33 crc kubenswrapper[5002]: I0930 12:21:33.668702 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:33 crc kubenswrapper[5002]: I0930 12:21:33.668749 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:33 crc kubenswrapper[5002]: I0930 12:21:33.668760 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:33 crc kubenswrapper[5002]: I0930 12:21:33.668777 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:33 crc kubenswrapper[5002]: I0930 12:21:33.668788 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:33Z","lastTransitionTime":"2025-09-30T12:21:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:33 crc kubenswrapper[5002]: I0930 12:21:33.675648 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-dj2ln" Sep 30 12:21:33 crc kubenswrapper[5002]: E0930 12:21:33.675925 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-dj2ln" podUID="68756e8a-d882-403f-acd7-2c41fce4446f" Sep 30 12:21:33 crc kubenswrapper[5002]: I0930 12:21:33.686526 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/kube-rbac-proxy-crio-crc"] Sep 30 12:21:33 crc kubenswrapper[5002]: I0930 12:21:33.771347 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:33 crc kubenswrapper[5002]: I0930 12:21:33.771393 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:33 crc kubenswrapper[5002]: I0930 12:21:33.771406 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:33 crc kubenswrapper[5002]: I0930 12:21:33.771426 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:33 crc kubenswrapper[5002]: I0930 12:21:33.771439 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:33Z","lastTransitionTime":"2025-09-30T12:21:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:33 crc kubenswrapper[5002]: I0930 12:21:33.857049 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:33 crc kubenswrapper[5002]: I0930 12:21:33.857089 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:33 crc kubenswrapper[5002]: I0930 12:21:33.857099 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:33 crc kubenswrapper[5002]: I0930 12:21:33.857112 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:33 crc kubenswrapper[5002]: I0930 12:21:33.857122 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:33Z","lastTransitionTime":"2025-09-30T12:21:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 12:21:33 crc kubenswrapper[5002]: E0930 12:21:33.872636 5002 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T12:21:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:33Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T12:21:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:33Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T12:21:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:33Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T12:21:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:33Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"2268e3e5-9437-4733-80ec-6085372d1c27\\\",\\\"systemUUID\\\":\\\"2730bf9d-d559-45ca-96f1-192133954467\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:33Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:33 crc kubenswrapper[5002]: I0930 12:21:33.876632 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:33 crc kubenswrapper[5002]: I0930 12:21:33.876652 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 30 12:21:33 crc kubenswrapper[5002]: I0930 12:21:33.876660 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:33 crc kubenswrapper[5002]: I0930 12:21:33.876670 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:33 crc kubenswrapper[5002]: I0930 12:21:33.876680 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:33Z","lastTransitionTime":"2025-09-30T12:21:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:33 crc kubenswrapper[5002]: E0930 12:21:33.891215 5002 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T12:21:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:33Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T12:21:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:33Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T12:21:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:33Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T12:21:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:33Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"2268e3e5-9437-4733-80ec-6085372d1c27\\\",\\\"systemUUID\\\":\\\"2730bf9d-d559-45ca-96f1-192133954467\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:33Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:33 crc kubenswrapper[5002]: I0930 12:21:33.895390 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:33 crc kubenswrapper[5002]: I0930 12:21:33.895636 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
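[Editor's sketch, not part of the log] The condition={...} objects recorded by setters.go above are plain NodeCondition JSON. A minimal Go illustration (the struct is a hand-rolled stand-in, not the upstream k8s.io/api type) decodes the Ready condition exactly as it appears in these entries:

package main

import (
	"encoding/json"
	"fmt"
)

// nodeCondition mirrors only the fields the kubelet logs above.
type nodeCondition struct {
	Type               string `json:"type"`
	Status             string `json:"status"`
	LastHeartbeatTime  string `json:"lastHeartbeatTime"`
	LastTransitionTime string `json:"lastTransitionTime"`
	Reason             string `json:"reason"`
	Message            string `json:"message"`
}

func main() {
	// Copied verbatim from the setters.go:603 entries in this log.
	raw := `{"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:33Z","lastTransitionTime":"2025-09-30T12:21:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}`
	var c nodeCondition
	if err := json.Unmarshal([]byte(raw), &c); err != nil {
		panic(err)
	}
	// Prints: Ready=False (KubeletNotReady): container runtime network not ready: ...
	fmt.Printf("%s=%s (%s): %s\n", c.Type, c.Status, c.Reason, c.Message)
}
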
event="NodeHasNoDiskPressure" Sep 30 12:21:33 crc kubenswrapper[5002]: I0930 12:21:33.895702 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:33 crc kubenswrapper[5002]: I0930 12:21:33.895772 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:33 crc kubenswrapper[5002]: I0930 12:21:33.895840 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:33Z","lastTransitionTime":"2025-09-30T12:21:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:33 crc kubenswrapper[5002]: E0930 12:21:33.910668 5002 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T12:21:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:33Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T12:21:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:33Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T12:21:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:33Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T12:21:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:33Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"2268e3e5-9437-4733-80ec-6085372d1c27\\\",\\\"systemUUID\\\":\\\"2730bf9d-d559-45ca-96f1-192133954467\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:33Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:33 crc kubenswrapper[5002]: I0930 12:21:33.915453 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:33 crc kubenswrapper[5002]: I0930 12:21:33.915618 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
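[Editor's sketch, not part of the log] Every NotReady message above traces to an empty /etc/kubernetes/cni/net.d/ (that path is taken straight from the log; the rest is illustrative). A quick Go check, assuming shell access on the node, that the directory really holds no CNI config:

package main

import (
	"fmt"
	"os"
)

func main() {
	// Path reported by the kubelet's NetworkPluginNotReady message.
	const dir = "/etc/kubernetes/cni/net.d/"
	entries, err := os.ReadDir(dir)
	if err != nil {
		fmt.Println("cannot read", dir, "->", err)
		return
	}
	if len(entries) == 0 {
		fmt.Println(dir, "is empty: no CNI config, so the kubelet stays NotReady")
		return
	}
	for _, e := range entries {
		fmt.Println("found CNI config candidate:", e.Name())
	}
}
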
event="NodeHasNoDiskPressure" Sep 30 12:21:33 crc kubenswrapper[5002]: I0930 12:21:33.915684 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:33 crc kubenswrapper[5002]: I0930 12:21:33.915746 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:33 crc kubenswrapper[5002]: I0930 12:21:33.915804 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:33Z","lastTransitionTime":"2025-09-30T12:21:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:33 crc kubenswrapper[5002]: E0930 12:21:33.932431 5002 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T12:21:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:33Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T12:21:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:33Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T12:21:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:33Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T12:21:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:33Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"2268e3e5-9437-4733-80ec-6085372d1c27\\\",\\\"systemUUID\\\":\\\"2730bf9d-d559-45ca-96f1-192133954467\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:33Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:33 crc kubenswrapper[5002]: I0930 12:21:33.935995 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:33 crc kubenswrapper[5002]: I0930 12:21:33.936084 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 30 12:21:33 crc kubenswrapper[5002]: I0930 12:21:33.936152 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:33 crc kubenswrapper[5002]: I0930 12:21:33.936225 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:33 crc kubenswrapper[5002]: I0930 12:21:33.936287 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:33Z","lastTransitionTime":"2025-09-30T12:21:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:33 crc kubenswrapper[5002]: E0930 12:21:33.946720 5002 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T12:21:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:33Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T12:21:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:33Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T12:21:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:33Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T12:21:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:33Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"2268e3e5-9437-4733-80ec-6085372d1c27\\\",\\\"systemUUID\\\":\\\"2730bf9d-d559-45ca-96f1-192133954467\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:33Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:33 crc kubenswrapper[5002]: E0930 12:21:33.947063 5002 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Sep 30 12:21:33 crc kubenswrapper[5002]: I0930 12:21:33.948520 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
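[Editor's sketch, not part of the log] The repeated webhook failure above pins the patch rejections on the serving certificate behind https://127.0.0.1:9743 (NotAfter 2025-08-24T17:21:41Z, checked at 2025-09-30T12:21:33Z). A minimal Go sketch, assuming it is run on the node while the webhook endpoint is listening, that reads the certificate's validity window directly:

package main

import (
	"crypto/tls"
	"fmt"
	"log"
)

func main() {
	// InsecureSkipVerify: we only want to read the peer certificate's
	// dates, not authenticate it (it is expired, after all).
	conn, err := tls.Dial("tcp", "127.0.0.1:9743", &tls.Config{InsecureSkipVerify: true})
	if err != nil {
		log.Fatal(err)
	}
	defer conn.Close()
	for _, cert := range conn.ConnectionState().PeerCertificates {
		// Expect notAfter=2025-08-24T17:21:41Z per the x509 error above.
		fmt.Printf("subject=%s notBefore=%s notAfter=%s\n",
			cert.Subject, cert.NotBefore, cert.NotAfter)
	}
}
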
event="NodeHasSufficientMemory" Sep 30 12:21:33 crc kubenswrapper[5002]: I0930 12:21:33.948551 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:33 crc kubenswrapper[5002]: I0930 12:21:33.948560 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:33 crc kubenswrapper[5002]: I0930 12:21:33.948573 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:33 crc kubenswrapper[5002]: I0930 12:21:33.948582 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:33Z","lastTransitionTime":"2025-09-30T12:21:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:34 crc kubenswrapper[5002]: I0930 12:21:34.051198 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:34 crc kubenswrapper[5002]: I0930 12:21:34.051631 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:34 crc kubenswrapper[5002]: I0930 12:21:34.052458 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:34 crc kubenswrapper[5002]: I0930 12:21:34.052723 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:34 crc kubenswrapper[5002]: I0930 12:21:34.052937 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:34Z","lastTransitionTime":"2025-09-30T12:21:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:34 crc kubenswrapper[5002]: I0930 12:21:34.156013 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:34 crc kubenswrapper[5002]: I0930 12:21:34.156323 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:34 crc kubenswrapper[5002]: I0930 12:21:34.156421 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:34 crc kubenswrapper[5002]: I0930 12:21:34.156561 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:34 crc kubenswrapper[5002]: I0930 12:21:34.156668 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:34Z","lastTransitionTime":"2025-09-30T12:21:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 12:21:34 crc kubenswrapper[5002]: I0930 12:21:34.259325 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:34 crc kubenswrapper[5002]: I0930 12:21:34.259384 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:34 crc kubenswrapper[5002]: I0930 12:21:34.259402 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:34 crc kubenswrapper[5002]: I0930 12:21:34.259426 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:34 crc kubenswrapper[5002]: I0930 12:21:34.259442 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:34Z","lastTransitionTime":"2025-09-30T12:21:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:34 crc kubenswrapper[5002]: I0930 12:21:34.362500 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:34 crc kubenswrapper[5002]: I0930 12:21:34.362548 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:34 crc kubenswrapper[5002]: I0930 12:21:34.362564 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:34 crc kubenswrapper[5002]: I0930 12:21:34.362589 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:34 crc kubenswrapper[5002]: I0930 12:21:34.362607 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:34Z","lastTransitionTime":"2025-09-30T12:21:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:34 crc kubenswrapper[5002]: I0930 12:21:34.464836 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:34 crc kubenswrapper[5002]: I0930 12:21:34.465203 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:34 crc kubenswrapper[5002]: I0930 12:21:34.465320 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:34 crc kubenswrapper[5002]: I0930 12:21:34.465445 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:34 crc kubenswrapper[5002]: I0930 12:21:34.465744 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:34Z","lastTransitionTime":"2025-09-30T12:21:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 12:21:34 crc kubenswrapper[5002]: I0930 12:21:34.568315 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:34 crc kubenswrapper[5002]: I0930 12:21:34.568616 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:34 crc kubenswrapper[5002]: I0930 12:21:34.568722 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:34 crc kubenswrapper[5002]: I0930 12:21:34.568818 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:34 crc kubenswrapper[5002]: I0930 12:21:34.568907 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:34Z","lastTransitionTime":"2025-09-30T12:21:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:34 crc kubenswrapper[5002]: I0930 12:21:34.672603 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:34 crc kubenswrapper[5002]: I0930 12:21:34.672649 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:34 crc kubenswrapper[5002]: I0930 12:21:34.672661 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:34 crc kubenswrapper[5002]: I0930 12:21:34.672676 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:34 crc kubenswrapper[5002]: I0930 12:21:34.672687 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:34Z","lastTransitionTime":"2025-09-30T12:21:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:34 crc kubenswrapper[5002]: I0930 12:21:34.676013 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 12:21:34 crc kubenswrapper[5002]: E0930 12:21:34.676114 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 12:21:34 crc kubenswrapper[5002]: I0930 12:21:34.676582 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 12:21:34 crc kubenswrapper[5002]: E0930 12:21:34.677011 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 12:21:34 crc kubenswrapper[5002]: I0930 12:21:34.677263 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 12:21:34 crc kubenswrapper[5002]: E0930 12:21:34.677610 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 12:21:34 crc kubenswrapper[5002]: I0930 12:21:34.775177 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:34 crc kubenswrapper[5002]: I0930 12:21:34.775583 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:34 crc kubenswrapper[5002]: I0930 12:21:34.776014 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:34 crc kubenswrapper[5002]: I0930 12:21:34.776314 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:34 crc kubenswrapper[5002]: I0930 12:21:34.776617 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:34Z","lastTransitionTime":"2025-09-30T12:21:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 12:21:34 crc kubenswrapper[5002]: I0930 12:21:34.879440 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:34 crc kubenswrapper[5002]: I0930 12:21:34.879553 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:34 crc kubenswrapper[5002]: I0930 12:21:34.879568 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:34 crc kubenswrapper[5002]: I0930 12:21:34.879597 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:34 crc kubenswrapper[5002]: I0930 12:21:34.879644 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:34Z","lastTransitionTime":"2025-09-30T12:21:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:34 crc kubenswrapper[5002]: I0930 12:21:34.982187 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:34 crc kubenswrapper[5002]: I0930 12:21:34.982465 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:34 crc kubenswrapper[5002]: I0930 12:21:34.982576 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:34 crc kubenswrapper[5002]: I0930 12:21:34.982687 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:34 crc kubenswrapper[5002]: I0930 12:21:34.982774 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:34Z","lastTransitionTime":"2025-09-30T12:21:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:35 crc kubenswrapper[5002]: I0930 12:21:35.086870 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:35 crc kubenswrapper[5002]: I0930 12:21:35.086917 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:35 crc kubenswrapper[5002]: I0930 12:21:35.086928 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:35 crc kubenswrapper[5002]: I0930 12:21:35.086945 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:35 crc kubenswrapper[5002]: I0930 12:21:35.086958 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:35Z","lastTransitionTime":"2025-09-30T12:21:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 12:21:35 crc kubenswrapper[5002]: I0930 12:21:35.190945 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:35 crc kubenswrapper[5002]: I0930 12:21:35.191027 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:35 crc kubenswrapper[5002]: I0930 12:21:35.191050 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:35 crc kubenswrapper[5002]: I0930 12:21:35.191078 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:35 crc kubenswrapper[5002]: I0930 12:21:35.191099 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:35Z","lastTransitionTime":"2025-09-30T12:21:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:35 crc kubenswrapper[5002]: I0930 12:21:35.294362 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:35 crc kubenswrapper[5002]: I0930 12:21:35.294409 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:35 crc kubenswrapper[5002]: I0930 12:21:35.294419 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:35 crc kubenswrapper[5002]: I0930 12:21:35.294436 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:35 crc kubenswrapper[5002]: I0930 12:21:35.294447 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:35Z","lastTransitionTime":"2025-09-30T12:21:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:35 crc kubenswrapper[5002]: I0930 12:21:35.397849 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:35 crc kubenswrapper[5002]: I0930 12:21:35.398346 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:35 crc kubenswrapper[5002]: I0930 12:21:35.398804 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:35 crc kubenswrapper[5002]: I0930 12:21:35.399199 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:35 crc kubenswrapper[5002]: I0930 12:21:35.399577 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:35Z","lastTransitionTime":"2025-09-30T12:21:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 12:21:35 crc kubenswrapper[5002]: I0930 12:21:35.503031 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:35 crc kubenswrapper[5002]: I0930 12:21:35.503078 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:35 crc kubenswrapper[5002]: I0930 12:21:35.503092 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:35 crc kubenswrapper[5002]: I0930 12:21:35.503116 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:35 crc kubenswrapper[5002]: I0930 12:21:35.503130 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:35Z","lastTransitionTime":"2025-09-30T12:21:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:35 crc kubenswrapper[5002]: I0930 12:21:35.606709 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:35 crc kubenswrapper[5002]: I0930 12:21:35.607080 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:35 crc kubenswrapper[5002]: I0930 12:21:35.607283 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:35 crc kubenswrapper[5002]: I0930 12:21:35.607533 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:35 crc kubenswrapper[5002]: I0930 12:21:35.607720 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:35Z","lastTransitionTime":"2025-09-30T12:21:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:35 crc kubenswrapper[5002]: I0930 12:21:35.675885 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-dj2ln" Sep 30 12:21:35 crc kubenswrapper[5002]: E0930 12:21:35.676131 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-dj2ln" podUID="68756e8a-d882-403f-acd7-2c41fce4446f" Sep 30 12:21:35 crc kubenswrapper[5002]: I0930 12:21:35.713323 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:35 crc kubenswrapper[5002]: I0930 12:21:35.713425 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:35 crc kubenswrapper[5002]: I0930 12:21:35.713450 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:35 crc kubenswrapper[5002]: I0930 12:21:35.713516 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:35 crc kubenswrapper[5002]: I0930 12:21:35.713560 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:35Z","lastTransitionTime":"2025-09-30T12:21:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:35 crc kubenswrapper[5002]: I0930 12:21:35.817578 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:35 crc kubenswrapper[5002]: I0930 12:21:35.817637 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:35 crc kubenswrapper[5002]: I0930 12:21:35.817649 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:35 crc kubenswrapper[5002]: I0930 12:21:35.817669 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:35 crc kubenswrapper[5002]: I0930 12:21:35.817685 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:35Z","lastTransitionTime":"2025-09-30T12:21:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 12:21:35 crc kubenswrapper[5002]: I0930 12:21:35.920351 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:35 crc kubenswrapper[5002]: I0930 12:21:35.920408 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:35 crc kubenswrapper[5002]: I0930 12:21:35.920421 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:35 crc kubenswrapper[5002]: I0930 12:21:35.920444 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:35 crc kubenswrapper[5002]: I0930 12:21:35.920456 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:35Z","lastTransitionTime":"2025-09-30T12:21:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:36 crc kubenswrapper[5002]: I0930 12:21:36.023782 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:36 crc kubenswrapper[5002]: I0930 12:21:36.023847 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:36 crc kubenswrapper[5002]: I0930 12:21:36.023870 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:36 crc kubenswrapper[5002]: I0930 12:21:36.023895 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:36 crc kubenswrapper[5002]: I0930 12:21:36.023915 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:36Z","lastTransitionTime":"2025-09-30T12:21:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:36 crc kubenswrapper[5002]: I0930 12:21:36.126978 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:36 crc kubenswrapper[5002]: I0930 12:21:36.127049 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:36 crc kubenswrapper[5002]: I0930 12:21:36.127072 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:36 crc kubenswrapper[5002]: I0930 12:21:36.127101 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:36 crc kubenswrapper[5002]: I0930 12:21:36.127122 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:36Z","lastTransitionTime":"2025-09-30T12:21:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 12:21:36 crc kubenswrapper[5002]: I0930 12:21:36.230011 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:36 crc kubenswrapper[5002]: I0930 12:21:36.230085 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:36 crc kubenswrapper[5002]: I0930 12:21:36.230110 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:36 crc kubenswrapper[5002]: I0930 12:21:36.230138 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:36 crc kubenswrapper[5002]: I0930 12:21:36.230160 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:36Z","lastTransitionTime":"2025-09-30T12:21:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:36 crc kubenswrapper[5002]: I0930 12:21:36.331775 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:36 crc kubenswrapper[5002]: I0930 12:21:36.331833 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:36 crc kubenswrapper[5002]: I0930 12:21:36.331850 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:36 crc kubenswrapper[5002]: I0930 12:21:36.331871 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:36 crc kubenswrapper[5002]: I0930 12:21:36.331888 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:36Z","lastTransitionTime":"2025-09-30T12:21:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:36 crc kubenswrapper[5002]: I0930 12:21:36.334427 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/68756e8a-d882-403f-acd7-2c41fce4446f-metrics-certs\") pod \"network-metrics-daemon-dj2ln\" (UID: \"68756e8a-d882-403f-acd7-2c41fce4446f\") " pod="openshift-multus/network-metrics-daemon-dj2ln" Sep 30 12:21:36 crc kubenswrapper[5002]: E0930 12:21:36.334557 5002 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Sep 30 12:21:36 crc kubenswrapper[5002]: E0930 12:21:36.334603 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/68756e8a-d882-403f-acd7-2c41fce4446f-metrics-certs podName:68756e8a-d882-403f-acd7-2c41fce4446f nodeName:}" failed. No retries permitted until 2025-09-30 12:22:08.334587858 +0000 UTC m=+102.584270004 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/68756e8a-d882-403f-acd7-2c41fce4446f-metrics-certs") pod "network-metrics-daemon-dj2ln" (UID: "68756e8a-d882-403f-acd7-2c41fce4446f") : object "openshift-multus"/"metrics-daemon-secret" not registered Sep 30 12:21:36 crc kubenswrapper[5002]: I0930 12:21:36.434001 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:36 crc kubenswrapper[5002]: I0930 12:21:36.434040 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:36 crc kubenswrapper[5002]: I0930 12:21:36.434056 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:36 crc kubenswrapper[5002]: I0930 12:21:36.434073 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:36 crc kubenswrapper[5002]: I0930 12:21:36.434084 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:36Z","lastTransitionTime":"2025-09-30T12:21:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:36 crc kubenswrapper[5002]: I0930 12:21:36.536642 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:36 crc kubenswrapper[5002]: I0930 12:21:36.536703 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:36 crc kubenswrapper[5002]: I0930 12:21:36.536714 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:36 crc kubenswrapper[5002]: I0930 12:21:36.536736 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:36 crc kubenswrapper[5002]: I0930 12:21:36.536748 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:36Z","lastTransitionTime":"2025-09-30T12:21:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 12:21:36 crc kubenswrapper[5002]: I0930 12:21:36.639337 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:36 crc kubenswrapper[5002]: I0930 12:21:36.639409 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:36 crc kubenswrapper[5002]: I0930 12:21:36.639431 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:36 crc kubenswrapper[5002]: I0930 12:21:36.639460 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:36 crc kubenswrapper[5002]: I0930 12:21:36.639515 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:36Z","lastTransitionTime":"2025-09-30T12:21:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:36 crc kubenswrapper[5002]: I0930 12:21:36.675591 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 12:21:36 crc kubenswrapper[5002]: I0930 12:21:36.675647 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 12:21:36 crc kubenswrapper[5002]: I0930 12:21:36.675656 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 12:21:36 crc kubenswrapper[5002]: E0930 12:21:36.675815 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 12:21:36 crc kubenswrapper[5002]: E0930 12:21:36.676001 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 12:21:36 crc kubenswrapper[5002]: E0930 12:21:36.676023 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 12:21:36 crc kubenswrapper[5002]: I0930 12:21:36.691537 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:36Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:36 crc kubenswrapper[5002]: I0930 12:21:36.707139 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-p45pn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d0334cb8-cf38-4e50-80e8-2b81d8d46ae7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca035599120eed38b0abc1893076e4d1ecab6b1ce53605f53ff30fa2782b63de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2nvm4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:49Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-p45pn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:36Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:36 crc kubenswrapper[5002]: I0930 12:21:36.725853 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-2lqq2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7b7df259-4156-484c-bedd-543ca42f2970\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3e10fed923eacb0c5409835ec689ec74b4ef68b770774b3fe03e05db05d63c04\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6865c45580d6dc6acaf6dede9483213728b45f602a6df3a6d9264bbec520db66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6865c45580d6dc6acaf6dede9483213728b45f602a6df3a6d9264bbec520db66\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://042d2758098735539d73ee03bcaf5bfaaa765ef2337975341c63511bff33e4ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://042d2758098735539d73ee03bcaf5bfaaa765ef2337975341c63511bff33e4ec\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://037e6837e485e2662c487cea7b28bd28b85c3aa2dc698edd3d16b4269907dd83\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://037e6837e485e2662c487cea7b28bd28b85c3aa2dc698edd3d16b4269907dd83\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2a451f8cf7baf508ad6ceda7f19f3117804e5698e678bedf8e4187f60847aaac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2a451f8cf7baf508ad6ceda7f19f3117804e5698e678bedf8e4187f60847aaac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://edf6916be3680c65376e28fb893a869599eb07d870ac5337a38fed83b4dbf28d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://edf6916be3680c65376e28fb893a869599eb07d870ac5337a38fed83b4dbf28d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7019df241ab90e98201c474287f02919d99bca60c432a8f61ac68d29ce53bed8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7019df241ab90e98201c474287f02919d99bca60c432a8f61ac68d29ce53bed8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-2lqq2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:36Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:36 crc kubenswrapper[5002]: I0930 12:21:36.741499 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-dj2ln" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"68756e8a-d882-403f-acd7-2c41fce4446f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jdgr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jdgr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:21:04Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-dj2ln\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:36Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:36 crc kubenswrapper[5002]: I0930 12:21:36.742180 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:36 crc kubenswrapper[5002]: I0930 12:21:36.742438 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:36 crc kubenswrapper[5002]: I0930 12:21:36.742608 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientPID" Sep 30 12:21:36 crc kubenswrapper[5002]: I0930 12:21:36.742835 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:36 crc kubenswrapper[5002]: I0930 12:21:36.743020 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:36Z","lastTransitionTime":"2025-09-30T12:21:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:36 crc kubenswrapper[5002]: I0930 12:21:36.762853 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"be2f35cf-d5f8-40aa-ae79-11e075053735\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b282a6a59ec67140cc73df77f9505388fb4ff97dfdedf8394eeb46d65a2bda8a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1f6f78afdbf3b3bd7c28515e9ff9ad08cdccd4f4f44106f8aa5375638b3e8ec9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1f6f78afdbf3b3bd7c28515e9ff9ad08cdccd4f4f44106f8aa5375638b3e8ec9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-
lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:26Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:36Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:36 crc kubenswrapper[5002]: I0930 12:21:36.779414 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"98317299-6ce9-4196-a387-becb81461d6a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6057b9d238b4d4dd1329f0ffdbfd88f2f8869c06e5eb681041229a15dd4a08dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://96f9b5d0a287ad67a6f1008d06b0d371a522722280353ed9001f81198b9295ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd0dba52ee6bddc02187198fe6c409abf3d336df93b558b27fc7f76e7ec8f2b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://84c3302e7daef1722f84b63d532dabe8e9f8cf5679478df9b92b73273b3a3000\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://84c3302e7daef1722f84b63d532dabe8e9f8cf5679478df9b92b73273b3a3000\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:27Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:26Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:36Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:36 crc kubenswrapper[5002]: I0930 12:21:36.796832 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://28d9568103a5257388af7a54e676246ab6ee732abd4516bd06e2cd7471f0ec08\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:36Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:36 crc kubenswrapper[5002]: I0930 12:21:36.813504 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97f2c4d225c4e4f2f04337915618c52ced7280fe45dc735fd9b1aca03f95f7f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:36Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:36 crc kubenswrapper[5002]: I0930 12:21:36.830055 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:36Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:36 crc kubenswrapper[5002]: I0930 12:21:36.844060 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hvvbg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c3b6a7b0-1c44-43ee-a789-36bb0cd51b87\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cb569d449d2d8d49ee8d62c4815b625481cd5ddd882a135dac7465fe63fbc67e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:21:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ffrqq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4537e7bf982dba689bd81821609884ac5f2530e6efa7d48cd65e05ff64d2ec6b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2
099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:21:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ffrqq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:21:02Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-hvvbg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:36Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:36 crc kubenswrapper[5002]: I0930 12:21:36.845441 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:36 crc kubenswrapper[5002]: I0930 12:21:36.845491 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:36 crc kubenswrapper[5002]: I0930 12:21:36.845501 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:36 crc kubenswrapper[5002]: I0930 12:21:36.845515 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:36 crc kubenswrapper[5002]: I0930 12:21:36.845525 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:36Z","lastTransitionTime":"2025-09-30T12:21:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 12:21:36 crc kubenswrapper[5002]: I0930 12:21:36.869385 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1d4eefd-8292-45a1-af4b-4a4906cccd2f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21a70de7ac636795f52b3bdd2a2b938494cabcf80e1fb9b321a309f4719edf4e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://240fb1d0e11ae7b3459121ba32094ddfa79dc71a1180befebcaf594859a63d86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://91de7b5c3b52d03b05c151b66857b5af5f3b9d228f3f1af32a7f504225e1e739\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4cb1aca4e313653c2d83c0d4fb8dfab1e6f8c8220c806cbcdcf154f2d9ab6e29\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ef59c1ec617a61effd91da12779f13866a50fe611d078330f0275e8d6ef1c27e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3a814062d9554255b768b26ad452946c64836707ac0e68c9e3f107dc587c0ccb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3a814062d9554255b768b26ad452946c64836707ac0e68c9e3f107dc587c0ccb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://74db2c969d2f9049fd8e8130a9fd50b312f3fce049ab0e814390dbff3b0cb596\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://74db2c969d2f9049fd8e8130a9fd50b312f3fce049ab0e814390dbff3b0cb596\\\",\\\"exitCode\\\":0,\\\"finished
At\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://a367821c9b4b8799fb789ca39d487c519925da11ebb80e94a0123e02cf78964e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a367821c9b4b8799fb789ca39d487c519925da11ebb80e94a0123e02cf78964e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:26Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:36Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:36 crc kubenswrapper[5002]: I0930 12:21:36.887045 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1fe2fccc-790d-41b7-a322-0f99dbd9b3e2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a46e01897629261ad42fc3fa2cc7fdf56a2a020864a1088d9e58edf61aac296e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://017ebbf00a59d33f36e3515dd280bc0c4363da8b3e621339185ebdb3c8728148\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ee0dc371a43f3780e1178e29ac491c7a61786fccb37527dd2f21d800adbbdae\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ba33674850f0b121ea405e37924fcc085b3a3625e87432d9df38b2420920bf45\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://49c139b2c0ae558d108d47a8452c2a8348fc164761d468e1b9b98ff2c7407f20\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"message\\\":\\\"le observer\\\\nW0930 12:20:46.369838 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0930 12:20:46.370037 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 12:20:46.371113 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1995022772/tls.crt::/tmp/serving-cert-1995022772/tls.key\\\\\\\"\\\\nI0930 12:20:46.549991 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0930 12:20:46.558658 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0930 12:20:46.558692 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0930 12:20:46.558723 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0930 12:20:46.558735 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0930 12:20:46.572632 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0930 12:20:46.572689 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 12:20:46.572707 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 12:20:46.572718 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0930 12:20:46.572725 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0930 12:20:46.572737 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0930 12:20:46.572744 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0930 12:20:46.573045 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0930 12:20:46.574462 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:40Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:21:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b99e73bb34b117cdb12e2475fc8e7d0ffdcb7fc44a000282e7776473eca209ea\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://24b1bc36a80f3925846e76f49c3347f1e2f14555096b3a514b06c6eb2e8fe02c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://24b1bc36a80f3925846e76f49c3347f1e2f14555096b3a514b06c6eb2e8fe02c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:36Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:36 crc kubenswrapper[5002]: I0930 12:21:36.901397 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"90d5183f-90e1-421f-bd64-fd7f05605586\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d2e0f074c0f31a5432cea24faa4f95c276f59cdf48bca8ceecd065b4cdff5d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://902cc950dc462d86c9de62cac5d04e99aa3240952f43e8a5f07884e70ff84b56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://047c48f1e175cddc7dc981a8d9027783f4a90c21cb444050771de50fb02e7a9e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f45ac789dc45e907d68fe54ec7bc2ea72dbcc5f46cd55c113c014203d5e709d9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:36Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:36 crc kubenswrapper[5002]: I0930 12:21:36.912289 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-ls6n9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2ebe21b2-eb85-4b30-b911-78f6619d07f9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e83e79da4b7b8f89a61048c7583c9ff7ccb7ac910744dfbb572c189c5676741\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-frsmm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase
\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:52Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-ls6n9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:36Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:36 crc kubenswrapper[5002]: I0930 12:21:36.927325 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-ttfn8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2bd9f18d-bfb3-4bd7-9a87-242029cd3200\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0809a587f4b73c4a5e632e8db2cc0d4a957eded6da9a771cf0b53e4862de54e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPa
th\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lvf2n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-ttfn8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:36Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:36 crc kubenswrapper[5002]: I0930 12:21:36.941449 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"341a55c6-78d3-4fa2-8f47-b56fd41fa1c1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7bc40360d8d17f580c7ded1e33762aa443998e4202892e2f2effc08b6659a4a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbwzb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6868404a7219f8dbbc4e7541e5f80402c169808daea81b5f94546472cb8e70a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-con
fig-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbwzb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:50Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-ncbb5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:36Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:36 crc kubenswrapper[5002]: I0930 12:21:36.949863 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:36 crc kubenswrapper[5002]: I0930 12:21:36.949919 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:36 crc kubenswrapper[5002]: I0930 12:21:36.949935 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:36 crc kubenswrapper[5002]: I0930 12:21:36.949955 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:36 crc kubenswrapper[5002]: I0930 12:21:36.949971 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:36Z","lastTransitionTime":"2025-09-30T12:21:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 12:21:36 crc kubenswrapper[5002]: I0930 12:21:36.964659 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4pvsr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7095aa7a-d067-4977-bdc5-3a45a52a6a39\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b95267743f525eb5c8712304a1dc7afe4403b2065c0a3b594d95dcba6348170\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://89dbc048c1483376e11a6754384e9ea154767a88f7dfb3463809a2554bb142b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://d41b04f928f3b4423bf4aa1babfd6292b749752d31ac57771b36bed503312d3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ceb946c94ca247c05c57001754a73122b1bb98a98d886df9d64b2e4a003a4cf6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfbe7b62a4f0a00896ac4608d70c4ef6dc7675e0312f85ca181fcf1087fe73e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a65c3c1af3f1ee07c2f5350011d1f5eb61dfe0b21ff13d7326373d65ffe603c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4e8af62b8de35a0c80cb0fafe3fc81375703e08d812d830cc738ee9bf7c102ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4e8af62b8de35a0c80cb0fafe3fc81375703e08d812d830cc738ee9bf7c102ee\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T12:21:17Z\\\",\\\"message\\\":\\\"rc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:17Z is after 2025-08-24T17:21:41Z]\\\\nI0930 12:21:17.550120 6732 transact.go:42] Configuring OVN: [{Op:update Table:Logical_Switch_Port Row:map[addresses:{GoSet:[0a:58:0a:d9:00:5c 10.217.0.92]} options:{GoMap:map[iface-id-ver:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 requested-chassis:crc]} port_security:{GoSet:[0a:58:0a:d9:00:5c 10.217.0.92]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {c94130be-172c-477c-88c4-40cc7eba30fe}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:} {Op:mutate Table:Logical_Switch Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:c94130be-172c-477c-88c4-40cc7eba30fe}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {7e8bb06a-06a5-45bc-a752-26a17d322811}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:} {Op:mutate Table:Port_Group Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:c94130be-172c-477c-88c4-40cc7eba30fe}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == 
{eb8eef51-1a8d-43f9-ae2e-3b2cc00ded\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T12:21:16Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-4pvsr_openshift-ovn-kubernetes(7095aa7a-d067-4977-bdc5-3a45a52a6a39)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ce6227a7ac3ced1fbd4814039859e25ff52f314aa8479275f1bfc49b04cf411\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\
"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29ad21e0858ff157cc785f60f949fb08082938850870cc2c5198447f1e1d9253\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://29ad21e0858ff157cc785f60f949fb08082938850870cc2c5198447f1e1d9253\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:50Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4pvsr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:36Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:36 crc kubenswrapper[5002]: I0930 12:21:36.978867 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://30f55f9a0bcab13420179c62f63cd5e785afe964ac86286450fcd91d7ca0562e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://306dd7840789c5fe2565d819a744a57e330cb1fcc1ab1ca4b3c5ebcfd0361d3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:36Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:36 crc kubenswrapper[5002]: I0930 12:21:36.994149 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:36Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:37 crc kubenswrapper[5002]: I0930 12:21:37.052365 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:37 crc kubenswrapper[5002]: I0930 12:21:37.052415 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:37 crc kubenswrapper[5002]: I0930 12:21:37.052432 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:37 crc kubenswrapper[5002]: I0930 12:21:37.052455 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:37 crc kubenswrapper[5002]: I0930 12:21:37.052517 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:37Z","lastTransitionTime":"2025-09-30T12:21:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 12:21:37 crc kubenswrapper[5002]: I0930 12:21:37.155507 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:37 crc kubenswrapper[5002]: I0930 12:21:37.155544 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:37 crc kubenswrapper[5002]: I0930 12:21:37.155555 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:37 crc kubenswrapper[5002]: I0930 12:21:37.155571 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:37 crc kubenswrapper[5002]: I0930 12:21:37.155582 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:37Z","lastTransitionTime":"2025-09-30T12:21:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:37 crc kubenswrapper[5002]: I0930 12:21:37.258511 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:37 crc kubenswrapper[5002]: I0930 12:21:37.258548 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:37 crc kubenswrapper[5002]: I0930 12:21:37.258559 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:37 crc kubenswrapper[5002]: I0930 12:21:37.258574 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:37 crc kubenswrapper[5002]: I0930 12:21:37.258585 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:37Z","lastTransitionTime":"2025-09-30T12:21:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:37 crc kubenswrapper[5002]: I0930 12:21:37.361633 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:37 crc kubenswrapper[5002]: I0930 12:21:37.361897 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:37 crc kubenswrapper[5002]: I0930 12:21:37.362143 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:37 crc kubenswrapper[5002]: I0930 12:21:37.362339 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:37 crc kubenswrapper[5002]: I0930 12:21:37.362567 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:37Z","lastTransitionTime":"2025-09-30T12:21:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 12:21:37 crc kubenswrapper[5002]: I0930 12:21:37.465576 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:37 crc kubenswrapper[5002]: I0930 12:21:37.465945 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:37 crc kubenswrapper[5002]: I0930 12:21:37.466114 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:37 crc kubenswrapper[5002]: I0930 12:21:37.466282 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:37 crc kubenswrapper[5002]: I0930 12:21:37.466426 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:37Z","lastTransitionTime":"2025-09-30T12:21:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:37 crc kubenswrapper[5002]: I0930 12:21:37.569102 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:37 crc kubenswrapper[5002]: I0930 12:21:37.569401 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:37 crc kubenswrapper[5002]: I0930 12:21:37.569538 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:37 crc kubenswrapper[5002]: I0930 12:21:37.569660 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:37 crc kubenswrapper[5002]: I0930 12:21:37.569759 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:37Z","lastTransitionTime":"2025-09-30T12:21:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:37 crc kubenswrapper[5002]: I0930 12:21:37.671618 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:37 crc kubenswrapper[5002]: I0930 12:21:37.671996 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:37 crc kubenswrapper[5002]: I0930 12:21:37.672385 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:37 crc kubenswrapper[5002]: I0930 12:21:37.672578 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:37 crc kubenswrapper[5002]: I0930 12:21:37.672796 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:37Z","lastTransitionTime":"2025-09-30T12:21:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 12:21:37 crc kubenswrapper[5002]: I0930 12:21:37.675886 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-dj2ln" Sep 30 12:21:37 crc kubenswrapper[5002]: E0930 12:21:37.675981 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-dj2ln" podUID="68756e8a-d882-403f-acd7-2c41fce4446f" Sep 30 12:21:37 crc kubenswrapper[5002]: I0930 12:21:37.776002 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:37 crc kubenswrapper[5002]: I0930 12:21:37.776034 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:37 crc kubenswrapper[5002]: I0930 12:21:37.776042 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:37 crc kubenswrapper[5002]: I0930 12:21:37.776054 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:37 crc kubenswrapper[5002]: I0930 12:21:37.776062 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:37Z","lastTransitionTime":"2025-09-30T12:21:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:37 crc kubenswrapper[5002]: I0930 12:21:37.878424 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:37 crc kubenswrapper[5002]: I0930 12:21:37.878463 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:37 crc kubenswrapper[5002]: I0930 12:21:37.878499 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:37 crc kubenswrapper[5002]: I0930 12:21:37.878533 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:37 crc kubenswrapper[5002]: I0930 12:21:37.878547 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:37Z","lastTransitionTime":"2025-09-30T12:21:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 12:21:37 crc kubenswrapper[5002]: I0930 12:21:37.980249 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:37 crc kubenswrapper[5002]: I0930 12:21:37.980296 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:37 crc kubenswrapper[5002]: I0930 12:21:37.980311 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:37 crc kubenswrapper[5002]: I0930 12:21:37.980333 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:37 crc kubenswrapper[5002]: I0930 12:21:37.980349 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:37Z","lastTransitionTime":"2025-09-30T12:21:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:38 crc kubenswrapper[5002]: I0930 12:21:38.082957 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:38 crc kubenswrapper[5002]: I0930 12:21:38.083002 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:38 crc kubenswrapper[5002]: I0930 12:21:38.083013 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:38 crc kubenswrapper[5002]: I0930 12:21:38.083030 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:38 crc kubenswrapper[5002]: I0930 12:21:38.083043 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:38Z","lastTransitionTime":"2025-09-30T12:21:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:38 crc kubenswrapper[5002]: I0930 12:21:38.185229 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:38 crc kubenswrapper[5002]: I0930 12:21:38.185279 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:38 crc kubenswrapper[5002]: I0930 12:21:38.185290 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:38 crc kubenswrapper[5002]: I0930 12:21:38.185306 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:38 crc kubenswrapper[5002]: I0930 12:21:38.185318 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:38Z","lastTransitionTime":"2025-09-30T12:21:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 12:21:38 crc kubenswrapper[5002]: I0930 12:21:38.247731 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-ttfn8_2bd9f18d-bfb3-4bd7-9a87-242029cd3200/kube-multus/0.log" Sep 30 12:21:38 crc kubenswrapper[5002]: I0930 12:21:38.247785 5002 generic.go:334] "Generic (PLEG): container finished" podID="2bd9f18d-bfb3-4bd7-9a87-242029cd3200" containerID="0809a587f4b73c4a5e632e8db2cc0d4a957eded6da9a771cf0b53e4862de54e5" exitCode=1 Sep 30 12:21:38 crc kubenswrapper[5002]: I0930 12:21:38.247821 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-ttfn8" event={"ID":"2bd9f18d-bfb3-4bd7-9a87-242029cd3200","Type":"ContainerDied","Data":"0809a587f4b73c4a5e632e8db2cc0d4a957eded6da9a771cf0b53e4862de54e5"} Sep 30 12:21:38 crc kubenswrapper[5002]: I0930 12:21:38.248277 5002 scope.go:117] "RemoveContainer" containerID="0809a587f4b73c4a5e632e8db2cc0d4a957eded6da9a771cf0b53e4862de54e5" Sep 30 12:21:38 crc kubenswrapper[5002]: I0930 12:21:38.264749 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1fe2fccc-790d-41b7-a322-0f99dbd9b3e2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a46e01897629261ad42fc3fa2cc7fdf56a2a020864a1088d9e58edf61aac296e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://017ebbf00a59d33f36e3515dd280bc0c4363da8b3e621339185ebdb3c8728148\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":
{\\\"startedAt\\\":\\\"2025-09-30T12:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ee0dc371a43f3780e1178e29ac491c7a61786fccb37527dd2f21d800adbbdae\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ba33674850f0b121ea405e37924fcc085b3a3625e87432d9df38b2420920bf45\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://49c139b2c0ae558d108d47a8452c2a8348fc164761d468e1b9b98ff2c7407f20\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"message\\\":\\\"le observer\\\\nW0930 12:20:46.369838 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0930 12:20:46.370037 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 12:20:46.371113 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1995022772/tls.crt::/tmp/serving-cert-1995022772/tls.key\\\\\\\"\\\\nI0930 12:20:46.549991 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0930 12:20:46.558658 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0930 12:20:46.558692 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0930 12:20:46.558723 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0930 12:20:46.558735 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0930 12:20:46.572632 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0930 12:20:46.572689 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 12:20:46.572707 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 12:20:46.572718 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0930 12:20:46.572725 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0930 12:20:46.572737 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0930 12:20:46.572744 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' 
detected.\\\\nI0930 12:20:46.573045 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0930 12:20:46.574462 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:40Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:21:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b99e73bb34b117cdb12e2475fc8e7d0ffdcb7fc44a000282e7776473eca209ea\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://24b1bc36a80f3925846e76f49c3347f1e2f14555096b3a514b06c6eb2e8fe02c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://24b1bc36a80f3925846e76f49c3347f1e2f14555096b3a514b06c6eb2e8fe02c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:38Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:38 crc kubenswrapper[5002]: I0930 12:21:38.281157 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://28d9568103a5257388af7a54e676246ab6ee732abd4516bd06e2cd7471f0ec08\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:38Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:38 crc kubenswrapper[5002]: I0930 12:21:38.288423 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:38 crc kubenswrapper[5002]: I0930 12:21:38.288459 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:38 crc kubenswrapper[5002]: I0930 12:21:38.288499 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:38 crc kubenswrapper[5002]: I0930 12:21:38.288515 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:38 crc kubenswrapper[5002]: I0930 12:21:38.288525 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:38Z","lastTransitionTime":"2025-09-30T12:21:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 12:21:38 crc kubenswrapper[5002]: I0930 12:21:38.293928 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97f2c4d225c4e4f2f04337915618c52ced7280fe45dc735fd9b1aca03f95f7f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:38Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:38 crc kubenswrapper[5002]: I0930 12:21:38.306630 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:38Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:38 crc kubenswrapper[5002]: I0930 12:21:38.324537 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hvvbg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c3b6a7b0-1c44-43ee-a789-36bb0cd51b87\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cb569d449d2d8d49ee8d62c4815b625481cd5ddd882a135dac7465fe63fbc67e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:21:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ffrqq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4537e7bf982dba689bd81821609884ac5f2530e6efa7d48cd65e05ff64d2ec6b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:21:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ffrqq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:21:02Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-hvvbg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:38Z is after 2025-08-24T17:21:41Z" Sep 30 
12:21:38 crc kubenswrapper[5002]: I0930 12:21:38.358704 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1d4eefd-8292-45a1-af4b-4a4906cccd2f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21a70de7ac636795f52b3bdd2a2b938494cabcf80e1fb9b321a309f4719edf4e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://240fb1d0e11ae7b3459121ba32094ddfa79dc71a1180befebcaf594859a63d86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://91de7b5c3b52d03b05c151b66857b5af5f3b9d228f3f1af32a7f504225e1e739\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"lo
g-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4cb1aca4e313653c2d83c0d4fb8dfab1e6f8c8220c806cbcdcf154f2d9ab6e29\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ef59c1ec617a61effd91da12779f13866a50fe611d078330f0275e8d6ef1c27e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3a814062d9554255b768b26ad452946c64836707ac0e68c9e3f107dc587c0ccb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3a814062d9554255b768b26ad452946c64836707ac0e68c9e3f107dc587c0ccb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://74db2c969d2f9049fd8e8130a9fd50b312f3fce049ab0e814390dbff3b0cb596\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://74db2c969d2f9049fd8e8130a9fd50b312f3fce049ab0e814390dbff3b0cb596\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"reas
on\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://a367821c9b4b8799fb789ca39d487c519925da11ebb80e94a0123e02cf78964e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a367821c9b4b8799fb789ca39d487c519925da11ebb80e94a0123e02cf78964e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:26Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:38Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:38 crc kubenswrapper[5002]: I0930 12:21:38.378394 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-ls6n9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2ebe21b2-eb85-4b30-b911-78f6619d07f9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e83e79da4b7b8f89a61048c7583c9ff7ccb7ac910744dfbb572c189c5676741\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-frsmm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:52Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-ls6n9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:38Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:38 crc kubenswrapper[5002]: I0930 12:21:38.390260 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:38 crc kubenswrapper[5002]: I0930 12:21:38.390338 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:38 crc kubenswrapper[5002]: I0930 12:21:38.390362 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:38 crc kubenswrapper[5002]: I0930 12:21:38.390394 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:38 crc kubenswrapper[5002]: I0930 12:21:38.390436 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:38Z","lastTransitionTime":"2025-09-30T12:21:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:38 crc kubenswrapper[5002]: I0930 12:21:38.392141 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"90d5183f-90e1-421f-bd64-fd7f05605586\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d2e0f074c0f31a5432cea24faa4f95c276f59cdf48bca8ceecd065b4cdff5d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://902cc950dc462d86c9de62cac5d04e99aa3240952f43e8a5f07884e70ff84b56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://047c48f1e175cddc7dc981a8d9027783f4a90c21cb444050771de50fb02e7a9e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:28
Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f45ac789dc45e907d68fe54ec7bc2ea72dbcc5f46cd55c113c014203d5e709d9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:38Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:38 crc kubenswrapper[5002]: I0930 12:21:38.411499 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:38Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:38 crc kubenswrapper[5002]: I0930 12:21:38.431800 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-ttfn8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2bd9f18d-bfb3-4bd7-9a87-242029cd3200\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:38Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:38Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0809a587f4b73c4a5e632e8db2cc0d4a957eded6da9a771cf0b53e4862de54e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0809a587f4b73c4a5e632e8db2cc0d4a957eded6da9a771cf0b53e4862de54e5\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T12:21:38Z\\\",\\\"message\\\":\\\"2025-09-30T12:20:52+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_59b1707c-38a6-4f61-b1a0-65dcba4fefc7\\\\n2025-09-30T12:20:52+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_59b1707c-38a6-4f61-b1a0-65dcba4fefc7 to /host/opt/cni/bin/\\\\n2025-09-30T12:20:53Z [verbose] multus-daemon started\\\\n2025-09-30T12:20:53Z [verbose] Readiness Indicator file check\\\\n2025-09-30T12:21:38Z [error] have you checked that your 
default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lvf2n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-ttfn8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:38Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:38 crc kubenswrapper[5002]: I0930 12:21:38.441725 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"341a55c6-78d3-4fa2-8f47-b56fd41fa1c1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7bc40360d8d17f580c7ded1e33762aa443998e4202892e2f2effc08b6659a4a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbwzb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6868404a7219f8dbbc4e7541e5f80402c169808daea81b5f94546472cb8e70a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbwzb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:50Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-ncbb5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:38Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:38 crc kubenswrapper[5002]: I0930 12:21:38.467404 5002 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4pvsr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7095aa7a-d067-4977-bdc5-3a45a52a6a39\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b95267743f525eb5c8712304a1dc7afe4403b2065c0a3b594d95dcba6348170\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://89dbc048c1483376e11a6754384e9ea154767a88f7dfb3463809a2554bb142b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d41b04f928f3b4423bf4aa1babfd6292b749752d31ac57771b36bed503312d3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0
-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ceb946c94ca247c05c57001754a73122b1bb98a98d886df9d64b2e4a003a4cf6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfbe7b62a4f0a00896ac4608d70c4ef6dc7675e0312f85ca181fcf1087fe73e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a65c3c1af3f1ee07c2f5350011d1f5eb61dfe0b21ff13d7326373d65ffe603c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\
\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4e8af62b8de35a0c80cb0fafe3fc81375703e08d812d830cc738ee9bf7c102ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4e8af62b8de35a0c80cb0fafe3fc81375703e08d812d830cc738ee9bf7c102ee\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T12:21:17Z\\\",\\\"message\\\":\\\"rc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:17Z is after 2025-08-24T17:21:41Z]\\\\nI0930 12:21:17.550120 6732 transact.go:42] Configuring OVN: [{Op:update Table:Logical_Switch_Port Row:map[addresses:{GoSet:[0a:58:0a:d9:00:5c 10.217.0.92]} options:{GoMap:map[iface-id-ver:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 requested-chassis:crc]} port_security:{GoSet:[0a:58:0a:d9:00:5c 10.217.0.92]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {c94130be-172c-477c-88c4-40cc7eba30fe}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:} {Op:mutate Table:Logical_Switch Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:c94130be-172c-477c-88c4-40cc7eba30fe}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {7e8bb06a-06a5-45bc-a752-26a17d322811}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:} {Op:mutate Table:Port_Group Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:c94130be-172c-477c-88c4-40cc7eba30fe}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {eb8eef51-1a8d-43f9-ae2e-3b2cc00ded\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T12:21:16Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-4pvsr_openshift-ovn-kubernetes(7095aa7a-d067-4977-bdc5-3a45a52a6a39)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ce6227a7ac3ced1fbd4814039859e25ff52f314aa8479275f1bfc49b04cf411\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29ad21e0858ff157cc785f60f949fb08082938850870cc2c5198447f1e1d9253\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://29ad21e0858ff157cc785f60f949fb08082938850870cc2c5198447f1e1d9253\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:50Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4pvsr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:38Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:38 crc kubenswrapper[5002]: I0930 12:21:38.482704 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://30f55f9a0bcab13420179c62f63cd5e785afe964ac86286450fcd91d7ca0562e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://306dd7840789c5fe2565d819a744a57e330cb1fcc1ab1ca4b3c5ebcfd0361d3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17
b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:38Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:38 crc kubenswrapper[5002]: I0930 12:21:38.493084 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:38 crc kubenswrapper[5002]: I0930 12:21:38.493134 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:38 crc kubenswrapper[5002]: I0930 12:21:38.493151 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:38 crc kubenswrapper[5002]: I0930 12:21:38.493173 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:38 crc kubenswrapper[5002]: I0930 12:21:38.493189 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:38Z","lastTransitionTime":"2025-09-30T12:21:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 12:21:38 crc kubenswrapper[5002]: I0930 12:21:38.497033 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"98317299-6ce9-4196-a387-becb81461d6a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6057b9d238b4d4dd1329f0ffdbfd88f2f8869c06e5eb681041229a15dd4a08dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://96f9b5d0a287ad67a6f1008d06b0d371a522722280353ed9001f81198b9295ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd0dba52ee6bddc02187198fe6c409abf3d336df93b558b27fc7f76e7ec8f2b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"
cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://84c3302e7daef1722f84b63d532dabe8e9f8cf5679478df9b92b73273b3a3000\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://84c3302e7daef1722f84b63d532dabe8e9f8cf5679478df9b92b73273b3a3000\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:27Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:26Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:38Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:38 crc kubenswrapper[5002]: I0930 12:21:38.510916 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:38Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:38 crc kubenswrapper[5002]: I0930 12:21:38.522889 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-p45pn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d0334cb8-cf38-4e50-80e8-2b81d8d46ae7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca035599120eed38b0abc1893076e4d1ecab6b1ce53605f53ff30fa2782b63de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2nvm4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:49Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-p45pn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-09-30T12:21:38Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:38 crc kubenswrapper[5002]: I0930 12:21:38.541832 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-2lqq2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7b7df259-4156-484c-bedd-543ca42f2970\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3e10fed923eacb0c5409835ec689ec74b4ef68b770774b3fe03e05db05d63c04\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6865c45580d6dc6acaf6dede9483213728b45f602a6df3a6d9264bbec520db66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6865c45580d6dc6acaf6dede9483213728b45f602a6df3a6d9264bbec520db66\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://042d2758098735539d73ee03bcaf5bfaaa765ef2337975341c
63511bff33e4ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://042d2758098735539d73ee03bcaf5bfaaa765ef2337975341c63511bff33e4ec\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://037e6837e485e2662c487cea7b28bd28b85c3aa2dc698edd3d16b4269907dd83\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://037e6837e485e2662c487cea7b28bd28b85c3aa2dc698edd3d16b4269907dd83\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2a451f8cf7baf508ad6ceda7f19f3117804e5698e678bedf8e4187f60847aaac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2a451f8cf7baf508ad6ceda7f19f3117804e5698e678bedf8e4187f60847aaac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-
copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://edf6916be3680c65376e28fb893a869599eb07d870ac5337a38fed83b4dbf28d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://edf6916be3680c65376e28fb893a869599eb07d870ac5337a38fed83b4dbf28d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7019df241ab90e98201c474287f02919d99bca60c432a8f61ac68d29ce53bed8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7019df241ab90e98201c474287f02919d99bca60c432a8f61ac68d29ce53bed8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-2lqq2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:38Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:38 crc kubenswrapper[5002]: I0930 12:21:38.550823 5002 status_manager.go:875] "Failed to update status for 
pod" pod="openshift-multus/network-metrics-daemon-dj2ln" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"68756e8a-d882-403f-acd7-2c41fce4446f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jdgr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jdgr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:21:04Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-dj2ln\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:38Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:38 crc kubenswrapper[5002]: I0930 12:21:38.560550 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"be2f35cf-d5f8-40aa-ae79-11e075053735\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b282a6a59ec67140cc73df77f9505388fb4ff97dfdedf8394eeb46d65a2bda8a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1f6f78afdbf3b3bd7c28515e9ff9ad08cdccd4f4f44106f8aa5375638b3e8ec9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1f6f78afdbf3b3bd7c28515e9ff9ad08cdccd4f4f44106f8aa5375638b3e8ec9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:26Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:38Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:38 crc kubenswrapper[5002]: I0930 12:21:38.595249 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:38 crc kubenswrapper[5002]: I0930 12:21:38.595273 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 30 12:21:38 crc kubenswrapper[5002]: I0930 12:21:38.595281 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:38 crc kubenswrapper[5002]: I0930 12:21:38.595294 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:38 crc kubenswrapper[5002]: I0930 12:21:38.595303 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:38Z","lastTransitionTime":"2025-09-30T12:21:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:38 crc kubenswrapper[5002]: I0930 12:21:38.675642 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 12:21:38 crc kubenswrapper[5002]: E0930 12:21:38.675823 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 12:21:38 crc kubenswrapper[5002]: I0930 12:21:38.676148 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 12:21:38 crc kubenswrapper[5002]: E0930 12:21:38.676249 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 12:21:38 crc kubenswrapper[5002]: I0930 12:21:38.676550 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 12:21:38 crc kubenswrapper[5002]: E0930 12:21:38.676684 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 12:21:38 crc kubenswrapper[5002]: I0930 12:21:38.697718 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:38 crc kubenswrapper[5002]: I0930 12:21:38.697756 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:38 crc kubenswrapper[5002]: I0930 12:21:38.697764 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:38 crc kubenswrapper[5002]: I0930 12:21:38.697776 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:38 crc kubenswrapper[5002]: I0930 12:21:38.697786 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:38Z","lastTransitionTime":"2025-09-30T12:21:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:38 crc kubenswrapper[5002]: I0930 12:21:38.800749 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:38 crc kubenswrapper[5002]: I0930 12:21:38.800782 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:38 crc kubenswrapper[5002]: I0930 12:21:38.800790 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:38 crc kubenswrapper[5002]: I0930 12:21:38.800804 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:38 crc kubenswrapper[5002]: I0930 12:21:38.800812 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:38Z","lastTransitionTime":"2025-09-30T12:21:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 12:21:38 crc kubenswrapper[5002]: I0930 12:21:38.903104 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:38 crc kubenswrapper[5002]: I0930 12:21:38.903163 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:38 crc kubenswrapper[5002]: I0930 12:21:38.903175 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:38 crc kubenswrapper[5002]: I0930 12:21:38.903192 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:38 crc kubenswrapper[5002]: I0930 12:21:38.903202 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:38Z","lastTransitionTime":"2025-09-30T12:21:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:39 crc kubenswrapper[5002]: I0930 12:21:39.005360 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:39 crc kubenswrapper[5002]: I0930 12:21:39.005405 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:39 crc kubenswrapper[5002]: I0930 12:21:39.005418 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:39 crc kubenswrapper[5002]: I0930 12:21:39.005434 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:39 crc kubenswrapper[5002]: I0930 12:21:39.005446 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:39Z","lastTransitionTime":"2025-09-30T12:21:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:39 crc kubenswrapper[5002]: I0930 12:21:39.108349 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:39 crc kubenswrapper[5002]: I0930 12:21:39.108651 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:39 crc kubenswrapper[5002]: I0930 12:21:39.108721 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:39 crc kubenswrapper[5002]: I0930 12:21:39.108819 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:39 crc kubenswrapper[5002]: I0930 12:21:39.108893 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:39Z","lastTransitionTime":"2025-09-30T12:21:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 12:21:39 crc kubenswrapper[5002]: I0930 12:21:39.212093 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:39 crc kubenswrapper[5002]: I0930 12:21:39.213346 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:39 crc kubenswrapper[5002]: I0930 12:21:39.213536 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:39 crc kubenswrapper[5002]: I0930 12:21:39.213678 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:39 crc kubenswrapper[5002]: I0930 12:21:39.213806 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:39Z","lastTransitionTime":"2025-09-30T12:21:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:39 crc kubenswrapper[5002]: I0930 12:21:39.253685 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-ttfn8_2bd9f18d-bfb3-4bd7-9a87-242029cd3200/kube-multus/0.log" Sep 30 12:21:39 crc kubenswrapper[5002]: I0930 12:21:39.253764 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-ttfn8" event={"ID":"2bd9f18d-bfb3-4bd7-9a87-242029cd3200","Type":"ContainerStarted","Data":"4cf7d940b7333c33f9092bf1a630d298ae51fee0947c17ef09d902ae5dc1103c"} Sep 30 12:21:39 crc kubenswrapper[5002]: I0930 12:21:39.274879 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-2lqq2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7b7df259-4156-484c-bedd-543ca42f2970\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3e10fed923eacb0c5409835ec689ec74b4ef68b770774b3fe03e05db05d63c04\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6865c45580d6dc6acaf6dede9483213728b45f602a6df3a6d9264bbec520db66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6865c45580d6dc6acaf6dede9483213728b45f602a6df3a6d9264bbec520db66\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://042d2758098735539d73ee03bcaf5bfaaa765ef2337975341c63511bff33e4ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://042d2758098735539d73ee03bcaf5bfaaa765ef2337975341c63511bff33e4ec\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://037e6837e485e2662c487cea7b28bd28b85c3aa2dc698edd3d16b4269907dd83\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://037e6837e485e2662c487cea7b28bd28b85c3aa2dc698edd3d16b4269907dd83\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2a451f8cf7baf508ad6ceda7f19f3117804e5698e678bedf8e4187f60847aaac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2a451f8cf7baf508ad6ceda7f19f3117804e5698e678bedf8e4187f60847aaac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://edf6916be3680c65376e28fb893a869599eb07d870ac5337a38fed83b4dbf28d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://edf6916be3680c65376e28fb893a869599eb07d870ac5337a38fed83b4dbf28d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7019df241ab90e98201c474287f02919d99bca60c432a8f61ac68d29ce53bed8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7019df241ab90e98201c474287f02919d99bca60c432a8f61ac68d29ce53bed8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-2lqq2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:39Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:39 crc kubenswrapper[5002]: I0930 12:21:39.293603 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-dj2ln" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"68756e8a-d882-403f-acd7-2c41fce4446f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jdgr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jdgr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:21:04Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-dj2ln\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:39Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:39 crc kubenswrapper[5002]: I0930 12:21:39.310390 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"be2f35cf-d5f8-40aa-ae79-11e075053735\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b282a6a59ec67140cc73df77f9505388fb4ff97dfdedf8394eeb46d65a2bda8a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1f6f78afdbf3b3bd7c28515e9ff9ad08cdccd4f4f44106f8aa5375638b3e8ec9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1f6f78afdbf3b3bd7c28515e9ff9ad08cdccd4f4f44106f8aa5375638b3e8ec9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:26Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:39Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:39 crc kubenswrapper[5002]: I0930 12:21:39.315869 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:39 crc kubenswrapper[5002]: I0930 12:21:39.315914 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 30 12:21:39 crc kubenswrapper[5002]: I0930 12:21:39.315924 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:39 crc kubenswrapper[5002]: I0930 12:21:39.315938 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:39 crc kubenswrapper[5002]: I0930 12:21:39.315948 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:39Z","lastTransitionTime":"2025-09-30T12:21:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:39 crc kubenswrapper[5002]: I0930 12:21:39.328957 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"98317299-6ce9-4196-a387-becb81461d6a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6057b9d238b4d4dd1329f0ffdbfd88f2f8869c06e5eb681041229a15dd4a08dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://96f9b5d0a287ad67a6f1008d06b0d371a522722280353ed9001f81198b9295ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"nam
e\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd0dba52ee6bddc02187198fe6c409abf3d336df93b558b27fc7f76e7ec8f2b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://84c3302e7daef1722f84b63d532dabe8e9f8cf5679478df9b92b73273b3a3000\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://84c3302e7daef1722f84b63d532dabe8e9f8cf5679478df9b92b73273b3a3000\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:27Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:26Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:39Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:39 crc kubenswrapper[5002]: I0930 12:21:39.346744 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:39Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:39 crc kubenswrapper[5002]: I0930 12:21:39.359313 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-p45pn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d0334cb8-cf38-4e50-80e8-2b81d8d46ae7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca035599120eed38b0abc1893076e4d1ecab6b1ce53605f53ff30fa2782b63de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2nvm4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\
\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:49Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-p45pn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:39Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:39 crc kubenswrapper[5002]: I0930 12:21:39.375290 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:39Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:39 crc kubenswrapper[5002]: I0930 12:21:39.395105 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hvvbg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c3b6a7b0-1c44-43ee-a789-36bb0cd51b87\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cb569d449d2d8d49ee8d62c4815b625481cd5ddd882a135dac7465fe63fbc67e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:21:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ffrqq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4537e7bf982dba689bd81821609884ac5f2530e6efa7d48cd65e05ff64d2ec6b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2
099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:21:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ffrqq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:21:02Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-hvvbg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:39Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:39 crc kubenswrapper[5002]: I0930 12:21:39.419142 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:39 crc kubenswrapper[5002]: I0930 12:21:39.419197 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:39 crc kubenswrapper[5002]: I0930 12:21:39.419213 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:39 crc kubenswrapper[5002]: I0930 12:21:39.419235 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:39 crc kubenswrapper[5002]: I0930 12:21:39.419252 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:39Z","lastTransitionTime":"2025-09-30T12:21:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 12:21:39 crc kubenswrapper[5002]: I0930 12:21:39.427937 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1d4eefd-8292-45a1-af4b-4a4906cccd2f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21a70de7ac636795f52b3bdd2a2b938494cabcf80e1fb9b321a309f4719edf4e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://240fb1d0e11ae7b3459121ba32094ddfa79dc71a1180befebcaf594859a63d86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://91de7b5c3b52d03b05c151b66857b5af5f3b9d228f3f1af32a7f504225e1e739\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4cb1aca4e313653c2d83c0d4fb8dfab1e6f8c8220c806cbcdcf154f2d9ab6e29\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ef59c1ec617a61effd91da12779f13866a50fe611d078330f0275e8d6ef1c27e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3a814062d9554255b768b26ad452946c64836707ac0e68c9e3f107dc587c0ccb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3a814062d9554255b768b26ad452946c64836707ac0e68c9e3f107dc587c0ccb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://74db2c969d2f9049fd8e8130a9fd50b312f3fce049ab0e814390dbff3b0cb596\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://74db2c969d2f9049fd8e8130a9fd50b312f3fce049ab0e814390dbff3b0cb596\\\",\\\"exitCode\\\":0,\\\"finished
At\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://a367821c9b4b8799fb789ca39d487c519925da11ebb80e94a0123e02cf78964e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a367821c9b4b8799fb789ca39d487c519925da11ebb80e94a0123e02cf78964e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:26Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:39Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:39 crc kubenswrapper[5002]: I0930 12:21:39.450689 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1fe2fccc-790d-41b7-a322-0f99dbd9b3e2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a46e01897629261ad42fc3fa2cc7fdf56a2a020864a1088d9e58edf61aac296e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://017ebbf00a59d33f36e3515dd280bc0c4363da8b3e621339185ebdb3c8728148\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ee0dc371a43f3780e1178e29ac491c7a61786fccb37527dd2f21d800adbbdae\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ba33674850f0b121ea405e37924fcc085b3a3625e87432d9df38b2420920bf45\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://49c139b2c0ae558d108d47a8452c2a8348fc164761d468e1b9b98ff2c7407f20\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"message\\\":\\\"le observer\\\\nW0930 12:20:46.369838 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0930 12:20:46.370037 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 12:20:46.371113 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1995022772/tls.crt::/tmp/serving-cert-1995022772/tls.key\\\\\\\"\\\\nI0930 12:20:46.549991 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0930 12:20:46.558658 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0930 12:20:46.558692 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0930 12:20:46.558723 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0930 12:20:46.558735 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0930 12:20:46.572632 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0930 12:20:46.572689 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 12:20:46.572707 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 12:20:46.572718 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0930 12:20:46.572725 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0930 12:20:46.572737 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0930 12:20:46.572744 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0930 12:20:46.573045 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0930 12:20:46.574462 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:40Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:21:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b99e73bb34b117cdb12e2475fc8e7d0ffdcb7fc44a000282e7776473eca209ea\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://24b1bc36a80f3925846e76f49c3347f1e2f14555096b3a514b06c6eb2e8fe02c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://24b1bc36a80f3925846e76f49c3347f1e2f14555096b3a514b06c6eb2e8fe02c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:39Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:39 crc kubenswrapper[5002]: I0930 12:21:39.473210 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://28d9568103a5257388af7a54e676246ab6ee732abd4516bd06e2cd7471f0ec08\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:39Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:39 crc kubenswrapper[5002]: I0930 12:21:39.487396 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97f2c4d225c4e4f2f04337915618c52ced7280fe45dc735fd9b1aca03f95f7f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:39Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:39 crc kubenswrapper[5002]: I0930 12:21:39.507397 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"90d5183f-90e1-421f-bd64-fd7f05605586\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d2e0f074c0f31a5432cea24faa4f95c276f59cdf48bca8ceecd065b4cdff5d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://902cc950dc462d86c9de62cac5d04e99aa3240952f43e8a5f07884e70ff84b56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://047c48f1e175cddc7dc981a8d9027783f4a90c21cb444050771de50fb02e7a9e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f45ac789dc45e907d68fe54ec7bc2ea72dbcc5f46cd55c113c014203d5e709d9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:39Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:39 crc kubenswrapper[5002]: I0930 12:21:39.521838 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:39 crc kubenswrapper[5002]: I0930 12:21:39.521899 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:39 crc kubenswrapper[5002]: I0930 12:21:39.521916 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:39 crc kubenswrapper[5002]: I0930 12:21:39.521941 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:39 crc kubenswrapper[5002]: I0930 12:21:39.521958 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:39Z","lastTransitionTime":"2025-09-30T12:21:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 12:21:39 crc kubenswrapper[5002]: I0930 12:21:39.523753 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-ls6n9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2ebe21b2-eb85-4b30-b911-78f6619d07f9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e83e79da4b7b8f89a61048c7583c9ff7ccb7ac910744dfbb572c189c5676741\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-frsmm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:52Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-ls6n9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:39Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:39 crc kubenswrapper[5002]: I0930 12:21:39.541913 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4pvsr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7095aa7a-d067-4977-bdc5-3a45a52a6a39\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b95267743f525eb5c8712304a1dc7afe4403b2065c0a3b594d95dcba6348170\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://89dbc048c1483376e11a6754384e9ea154767a88f7dfb3463809a2554bb142b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d41b04f928f3b4423bf4aa1babfd6292b749752d31ac57771b36bed503312d3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ceb946c94ca247c05c57001754a73122b1bb98a98d886df9d64b2e4a003a4cf6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfbe7b62a4f0a00896ac4608d70c4ef6dc7675e0312f85ca181fcf1087fe73e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a65c3c1af3f1ee07c2f5350011d1f5eb61dfe0b21ff13d7326373d65ffe603c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4e8af62b8de35a0c80cb0fafe3fc81375703e08d812d830cc738ee9bf7c102ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4e8af62b8de35a0c80cb0fafe3fc81375703e08d812d830cc738ee9bf7c102ee\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T12:21:17Z\\\",\\\"message\\\":\\\"rc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:17Z is after 2025-08-24T17:21:41Z]\\\\nI0930 12:21:17.550120 6732 transact.go:42] Configuring OVN: [{Op:update Table:Logical_Switch_Port Row:map[addresses:{GoSet:[0a:58:0a:d9:00:5c 10.217.0.92]} options:{GoMap:map[iface-id-ver:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 requested-chassis:crc]} port_security:{GoSet:[0a:58:0a:d9:00:5c 10.217.0.92]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {c94130be-172c-477c-88c4-40cc7eba30fe}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:} {Op:mutate Table:Logical_Switch Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:c94130be-172c-477c-88c4-40cc7eba30fe}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {7e8bb06a-06a5-45bc-a752-26a17d322811}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:} {Op:mutate Table:Port_Group Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:c94130be-172c-477c-88c4-40cc7eba30fe}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {eb8eef51-1a8d-43f9-ae2e-3b2cc00ded\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T12:21:16Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-4pvsr_openshift-ovn-kubernetes(7095aa7a-d067-4977-bdc5-3a45a52a6a39)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ce6227a7ac3ced1fbd4814039859e25ff52f314aa8479275f1bfc49b04cf411\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29ad21e0858ff157cc785f60f949fb08082938850870cc2c5198447f1e1d9253\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://29ad21e0858ff157cc785f60f949fb08082938850870cc2c5198447f1e1d9253\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:50Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4pvsr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:39Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:39 crc kubenswrapper[5002]: I0930 12:21:39.553449 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://30f55f9a0bcab13420179c62f63cd5e785afe964ac86286450fcd91d7ca0562e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://306dd7840789c5fe2565d819a744a57e330cb1fcc1ab1ca4b3c5ebcfd0361d3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17
b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:39Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:39 crc kubenswrapper[5002]: I0930 12:21:39.564633 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:39Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:39 crc kubenswrapper[5002]: I0930 12:21:39.579271 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-ttfn8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2bd9f18d-bfb3-4bd7-9a87-242029cd3200\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4cf7d940b7333c33f9092bf1a630d298ae51fee0947c17ef09d902ae5dc1103c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0809a587f4b73c4a5e632e8db2cc0d4a957eded6da9a771cf0b53e4862de54e5\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T12:21:38Z\\\",\\\"message\\\":\\\"2025-09-30T12:20:52+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_59b1707c-38a6-4f61-b1a0-65dcba4fefc7\\\\n2025-09-30T12:20:52+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_59b1707c-38a6-4f61-b1a0-65dcba4fefc7 to /host/opt/cni/bin/\\\\n2025-09-30T12:20:53Z [verbose] multus-daemon started\\\\n2025-09-30T12:20:53Z [verbose] Readiness Indicator file check\\\\n2025-09-30T12:21:38Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:21:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lvf2n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-ttfn8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:39Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:39 crc kubenswrapper[5002]: I0930 12:21:39.589757 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"341a55c6-78d3-4fa2-8f47-b56fd41fa1c1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7bc40360d8d17f580c7ded1e33762aa443998e4202892e2f2effc08b6659a4a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbwzb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6868404a7219f8dbbc4e7541e5f80402c169808daea81b5f94546472cb8e70a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbwzb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:50Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-ncbb5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:39Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:39 crc kubenswrapper[5002]: I0930 12:21:39.624944 5002 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:39 crc kubenswrapper[5002]: I0930 12:21:39.625009 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:39 crc kubenswrapper[5002]: I0930 12:21:39.625020 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:39 crc kubenswrapper[5002]: I0930 12:21:39.625036 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:39 crc kubenswrapper[5002]: I0930 12:21:39.625047 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:39Z","lastTransitionTime":"2025-09-30T12:21:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:39 crc kubenswrapper[5002]: I0930 12:21:39.675029 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-dj2ln" Sep 30 12:21:39 crc kubenswrapper[5002]: E0930 12:21:39.675223 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-dj2ln" podUID="68756e8a-d882-403f-acd7-2c41fce4446f" Sep 30 12:21:39 crc kubenswrapper[5002]: I0930 12:21:39.727780 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:39 crc kubenswrapper[5002]: I0930 12:21:39.727821 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:39 crc kubenswrapper[5002]: I0930 12:21:39.727832 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:39 crc kubenswrapper[5002]: I0930 12:21:39.727848 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:39 crc kubenswrapper[5002]: I0930 12:21:39.727861 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:39Z","lastTransitionTime":"2025-09-30T12:21:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 12:21:39 crc kubenswrapper[5002]: I0930 12:21:39.830380 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:39 crc kubenswrapper[5002]: I0930 12:21:39.830465 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:39 crc kubenswrapper[5002]: I0930 12:21:39.830503 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:39 crc kubenswrapper[5002]: I0930 12:21:39.830521 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:39 crc kubenswrapper[5002]: I0930 12:21:39.830536 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:39Z","lastTransitionTime":"2025-09-30T12:21:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:39 crc kubenswrapper[5002]: I0930 12:21:39.933941 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:39 crc kubenswrapper[5002]: I0930 12:21:39.934002 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:39 crc kubenswrapper[5002]: I0930 12:21:39.934013 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:39 crc kubenswrapper[5002]: I0930 12:21:39.934031 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:39 crc kubenswrapper[5002]: I0930 12:21:39.934045 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:39Z","lastTransitionTime":"2025-09-30T12:21:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:40 crc kubenswrapper[5002]: I0930 12:21:40.036782 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:40 crc kubenswrapper[5002]: I0930 12:21:40.036829 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:40 crc kubenswrapper[5002]: I0930 12:21:40.036843 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:40 crc kubenswrapper[5002]: I0930 12:21:40.036859 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:40 crc kubenswrapper[5002]: I0930 12:21:40.036872 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:40Z","lastTransitionTime":"2025-09-30T12:21:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 12:21:40 crc kubenswrapper[5002]: I0930 12:21:40.140125 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:40 crc kubenswrapper[5002]: I0930 12:21:40.140181 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:40 crc kubenswrapper[5002]: I0930 12:21:40.140193 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:40 crc kubenswrapper[5002]: I0930 12:21:40.140210 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:40 crc kubenswrapper[5002]: I0930 12:21:40.140223 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:40Z","lastTransitionTime":"2025-09-30T12:21:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:40 crc kubenswrapper[5002]: I0930 12:21:40.243041 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:40 crc kubenswrapper[5002]: I0930 12:21:40.243104 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:40 crc kubenswrapper[5002]: I0930 12:21:40.243122 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:40 crc kubenswrapper[5002]: I0930 12:21:40.243149 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:40 crc kubenswrapper[5002]: I0930 12:21:40.243165 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:40Z","lastTransitionTime":"2025-09-30T12:21:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:40 crc kubenswrapper[5002]: I0930 12:21:40.346251 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:40 crc kubenswrapper[5002]: I0930 12:21:40.346305 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:40 crc kubenswrapper[5002]: I0930 12:21:40.346321 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:40 crc kubenswrapper[5002]: I0930 12:21:40.346340 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:40 crc kubenswrapper[5002]: I0930 12:21:40.346352 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:40Z","lastTransitionTime":"2025-09-30T12:21:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 12:21:40 crc kubenswrapper[5002]: I0930 12:21:40.449038 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:40 crc kubenswrapper[5002]: I0930 12:21:40.449082 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:40 crc kubenswrapper[5002]: I0930 12:21:40.449093 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:40 crc kubenswrapper[5002]: I0930 12:21:40.449110 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:40 crc kubenswrapper[5002]: I0930 12:21:40.449121 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:40Z","lastTransitionTime":"2025-09-30T12:21:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:40 crc kubenswrapper[5002]: I0930 12:21:40.552628 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:40 crc kubenswrapper[5002]: I0930 12:21:40.552695 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:40 crc kubenswrapper[5002]: I0930 12:21:40.552712 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:40 crc kubenswrapper[5002]: I0930 12:21:40.552736 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:40 crc kubenswrapper[5002]: I0930 12:21:40.552753 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:40Z","lastTransitionTime":"2025-09-30T12:21:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:40 crc kubenswrapper[5002]: I0930 12:21:40.655536 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:40 crc kubenswrapper[5002]: I0930 12:21:40.655599 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:40 crc kubenswrapper[5002]: I0930 12:21:40.655612 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:40 crc kubenswrapper[5002]: I0930 12:21:40.655631 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:40 crc kubenswrapper[5002]: I0930 12:21:40.655644 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:40Z","lastTransitionTime":"2025-09-30T12:21:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 12:21:40 crc kubenswrapper[5002]: I0930 12:21:40.675574 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 12:21:40 crc kubenswrapper[5002]: I0930 12:21:40.675611 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 12:21:40 crc kubenswrapper[5002]: E0930 12:21:40.675726 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 12:21:40 crc kubenswrapper[5002]: I0930 12:21:40.675761 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 12:21:40 crc kubenswrapper[5002]: E0930 12:21:40.675956 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 12:21:40 crc kubenswrapper[5002]: E0930 12:21:40.676015 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 12:21:40 crc kubenswrapper[5002]: I0930 12:21:40.758007 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:40 crc kubenswrapper[5002]: I0930 12:21:40.758067 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:40 crc kubenswrapper[5002]: I0930 12:21:40.758090 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:40 crc kubenswrapper[5002]: I0930 12:21:40.758114 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:40 crc kubenswrapper[5002]: I0930 12:21:40.758131 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:40Z","lastTransitionTime":"2025-09-30T12:21:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 12:21:40 crc kubenswrapper[5002]: I0930 12:21:40.861221 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:40 crc kubenswrapper[5002]: I0930 12:21:40.861310 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:40 crc kubenswrapper[5002]: I0930 12:21:40.861334 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:40 crc kubenswrapper[5002]: I0930 12:21:40.861364 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:40 crc kubenswrapper[5002]: I0930 12:21:40.861389 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:40Z","lastTransitionTime":"2025-09-30T12:21:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:40 crc kubenswrapper[5002]: I0930 12:21:40.964942 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:40 crc kubenswrapper[5002]: I0930 12:21:40.965007 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:40 crc kubenswrapper[5002]: I0930 12:21:40.965033 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:40 crc kubenswrapper[5002]: I0930 12:21:40.965060 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:40 crc kubenswrapper[5002]: I0930 12:21:40.965080 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:40Z","lastTransitionTime":"2025-09-30T12:21:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:41 crc kubenswrapper[5002]: I0930 12:21:41.069467 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:41 crc kubenswrapper[5002]: I0930 12:21:41.069556 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:41 crc kubenswrapper[5002]: I0930 12:21:41.069573 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:41 crc kubenswrapper[5002]: I0930 12:21:41.069597 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:41 crc kubenswrapper[5002]: I0930 12:21:41.069614 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:41Z","lastTransitionTime":"2025-09-30T12:21:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 12:21:41 crc kubenswrapper[5002]: I0930 12:21:41.172288 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:41 crc kubenswrapper[5002]: I0930 12:21:41.172319 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:41 crc kubenswrapper[5002]: I0930 12:21:41.172327 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:41 crc kubenswrapper[5002]: I0930 12:21:41.172339 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:41 crc kubenswrapper[5002]: I0930 12:21:41.172347 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:41Z","lastTransitionTime":"2025-09-30T12:21:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:41 crc kubenswrapper[5002]: I0930 12:21:41.274914 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:41 crc kubenswrapper[5002]: I0930 12:21:41.274966 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:41 crc kubenswrapper[5002]: I0930 12:21:41.274983 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:41 crc kubenswrapper[5002]: I0930 12:21:41.275004 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:41 crc kubenswrapper[5002]: I0930 12:21:41.275020 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:41Z","lastTransitionTime":"2025-09-30T12:21:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:41 crc kubenswrapper[5002]: I0930 12:21:41.378080 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:41 crc kubenswrapper[5002]: I0930 12:21:41.378141 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:41 crc kubenswrapper[5002]: I0930 12:21:41.378163 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:41 crc kubenswrapper[5002]: I0930 12:21:41.378190 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:41 crc kubenswrapper[5002]: I0930 12:21:41.378212 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:41Z","lastTransitionTime":"2025-09-30T12:21:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 12:21:41 crc kubenswrapper[5002]: I0930 12:21:41.481068 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:41 crc kubenswrapper[5002]: I0930 12:21:41.481136 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:41 crc kubenswrapper[5002]: I0930 12:21:41.481159 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:41 crc kubenswrapper[5002]: I0930 12:21:41.481189 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:41 crc kubenswrapper[5002]: I0930 12:21:41.481210 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:41Z","lastTransitionTime":"2025-09-30T12:21:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:41 crc kubenswrapper[5002]: I0930 12:21:41.583906 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:41 crc kubenswrapper[5002]: I0930 12:21:41.583958 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:41 crc kubenswrapper[5002]: I0930 12:21:41.583971 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:41 crc kubenswrapper[5002]: I0930 12:21:41.583992 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:41 crc kubenswrapper[5002]: I0930 12:21:41.584005 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:41Z","lastTransitionTime":"2025-09-30T12:21:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:41 crc kubenswrapper[5002]: I0930 12:21:41.675205 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-dj2ln" Sep 30 12:21:41 crc kubenswrapper[5002]: E0930 12:21:41.675456 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-dj2ln" podUID="68756e8a-d882-403f-acd7-2c41fce4446f" Sep 30 12:21:41 crc kubenswrapper[5002]: I0930 12:21:41.687522 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:41 crc kubenswrapper[5002]: I0930 12:21:41.687586 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:41 crc kubenswrapper[5002]: I0930 12:21:41.687598 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:41 crc kubenswrapper[5002]: I0930 12:21:41.687619 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:41 crc kubenswrapper[5002]: I0930 12:21:41.687633 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:41Z","lastTransitionTime":"2025-09-30T12:21:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:41 crc kubenswrapper[5002]: I0930 12:21:41.790899 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:41 crc kubenswrapper[5002]: I0930 12:21:41.791002 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:41 crc kubenswrapper[5002]: I0930 12:21:41.791023 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:41 crc kubenswrapper[5002]: I0930 12:21:41.791049 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:41 crc kubenswrapper[5002]: I0930 12:21:41.791067 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:41Z","lastTransitionTime":"2025-09-30T12:21:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 12:21:41 crc kubenswrapper[5002]: I0930 12:21:41.894165 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:41 crc kubenswrapper[5002]: I0930 12:21:41.894211 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:41 crc kubenswrapper[5002]: I0930 12:21:41.894228 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:41 crc kubenswrapper[5002]: I0930 12:21:41.894249 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:41 crc kubenswrapper[5002]: I0930 12:21:41.894267 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:41Z","lastTransitionTime":"2025-09-30T12:21:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:42 crc kubenswrapper[5002]: I0930 12:21:42.003860 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:42 crc kubenswrapper[5002]: I0930 12:21:42.003915 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:42 crc kubenswrapper[5002]: I0930 12:21:42.003931 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:42 crc kubenswrapper[5002]: I0930 12:21:42.003957 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:42 crc kubenswrapper[5002]: I0930 12:21:42.003974 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:42Z","lastTransitionTime":"2025-09-30T12:21:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:42 crc kubenswrapper[5002]: I0930 12:21:42.107377 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:42 crc kubenswrapper[5002]: I0930 12:21:42.107510 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:42 crc kubenswrapper[5002]: I0930 12:21:42.107538 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:42 crc kubenswrapper[5002]: I0930 12:21:42.107568 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:42 crc kubenswrapper[5002]: I0930 12:21:42.107591 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:42Z","lastTransitionTime":"2025-09-30T12:21:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 12:21:42 crc kubenswrapper[5002]: I0930 12:21:42.210330 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:42 crc kubenswrapper[5002]: I0930 12:21:42.210393 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:42 crc kubenswrapper[5002]: I0930 12:21:42.210411 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:42 crc kubenswrapper[5002]: I0930 12:21:42.210434 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:42 crc kubenswrapper[5002]: I0930 12:21:42.210451 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:42Z","lastTransitionTime":"2025-09-30T12:21:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:42 crc kubenswrapper[5002]: I0930 12:21:42.312656 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:42 crc kubenswrapper[5002]: I0930 12:21:42.312721 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:42 crc kubenswrapper[5002]: I0930 12:21:42.312744 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:42 crc kubenswrapper[5002]: I0930 12:21:42.312776 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:42 crc kubenswrapper[5002]: I0930 12:21:42.312798 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:42Z","lastTransitionTime":"2025-09-30T12:21:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:42 crc kubenswrapper[5002]: I0930 12:21:42.415745 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:42 crc kubenswrapper[5002]: I0930 12:21:42.415823 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:42 crc kubenswrapper[5002]: I0930 12:21:42.415844 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:42 crc kubenswrapper[5002]: I0930 12:21:42.415872 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:42 crc kubenswrapper[5002]: I0930 12:21:42.415894 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:42Z","lastTransitionTime":"2025-09-30T12:21:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 12:21:42 crc kubenswrapper[5002]: I0930 12:21:42.517745 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:42 crc kubenswrapper[5002]: I0930 12:21:42.517778 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:42 crc kubenswrapper[5002]: I0930 12:21:42.517787 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:42 crc kubenswrapper[5002]: I0930 12:21:42.517800 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:42 crc kubenswrapper[5002]: I0930 12:21:42.517810 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:42Z","lastTransitionTime":"2025-09-30T12:21:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:42 crc kubenswrapper[5002]: I0930 12:21:42.621147 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:42 crc kubenswrapper[5002]: I0930 12:21:42.621188 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:42 crc kubenswrapper[5002]: I0930 12:21:42.621198 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:42 crc kubenswrapper[5002]: I0930 12:21:42.621213 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:42 crc kubenswrapper[5002]: I0930 12:21:42.621222 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:42Z","lastTransitionTime":"2025-09-30T12:21:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:42 crc kubenswrapper[5002]: I0930 12:21:42.675922 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 12:21:42 crc kubenswrapper[5002]: I0930 12:21:42.675960 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 12:21:42 crc kubenswrapper[5002]: I0930 12:21:42.676001 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 12:21:42 crc kubenswrapper[5002]: E0930 12:21:42.676075 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 12:21:42 crc kubenswrapper[5002]: E0930 12:21:42.676164 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 12:21:42 crc kubenswrapper[5002]: E0930 12:21:42.676253 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 12:21:42 crc kubenswrapper[5002]: I0930 12:21:42.724235 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:42 crc kubenswrapper[5002]: I0930 12:21:42.724311 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:42 crc kubenswrapper[5002]: I0930 12:21:42.724337 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:42 crc kubenswrapper[5002]: I0930 12:21:42.724366 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:42 crc kubenswrapper[5002]: I0930 12:21:42.724426 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:42Z","lastTransitionTime":"2025-09-30T12:21:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:42 crc kubenswrapper[5002]: I0930 12:21:42.827209 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:42 crc kubenswrapper[5002]: I0930 12:21:42.827268 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:42 crc kubenswrapper[5002]: I0930 12:21:42.827303 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:42 crc kubenswrapper[5002]: I0930 12:21:42.827319 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:42 crc kubenswrapper[5002]: I0930 12:21:42.827330 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:42Z","lastTransitionTime":"2025-09-30T12:21:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 12:21:42 crc kubenswrapper[5002]: I0930 12:21:42.929681 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:42 crc kubenswrapper[5002]: I0930 12:21:42.929737 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:42 crc kubenswrapper[5002]: I0930 12:21:42.929754 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:42 crc kubenswrapper[5002]: I0930 12:21:42.929779 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:42 crc kubenswrapper[5002]: I0930 12:21:42.929797 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:42Z","lastTransitionTime":"2025-09-30T12:21:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:43 crc kubenswrapper[5002]: I0930 12:21:43.032842 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:43 crc kubenswrapper[5002]: I0930 12:21:43.032922 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:43 crc kubenswrapper[5002]: I0930 12:21:43.032945 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:43 crc kubenswrapper[5002]: I0930 12:21:43.032977 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:43 crc kubenswrapper[5002]: I0930 12:21:43.033000 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:43Z","lastTransitionTime":"2025-09-30T12:21:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:43 crc kubenswrapper[5002]: I0930 12:21:43.136650 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:43 crc kubenswrapper[5002]: I0930 12:21:43.136730 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:43 crc kubenswrapper[5002]: I0930 12:21:43.136753 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:43 crc kubenswrapper[5002]: I0930 12:21:43.136804 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:43 crc kubenswrapper[5002]: I0930 12:21:43.136822 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:43Z","lastTransitionTime":"2025-09-30T12:21:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 12:21:43 crc kubenswrapper[5002]: I0930 12:21:43.240220 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:43 crc kubenswrapper[5002]: I0930 12:21:43.240261 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:43 crc kubenswrapper[5002]: I0930 12:21:43.240272 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:43 crc kubenswrapper[5002]: I0930 12:21:43.240289 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:43 crc kubenswrapper[5002]: I0930 12:21:43.240300 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:43Z","lastTransitionTime":"2025-09-30T12:21:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:43 crc kubenswrapper[5002]: I0930 12:21:43.342968 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:43 crc kubenswrapper[5002]: I0930 12:21:43.343043 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:43 crc kubenswrapper[5002]: I0930 12:21:43.343061 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:43 crc kubenswrapper[5002]: I0930 12:21:43.343086 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:43 crc kubenswrapper[5002]: I0930 12:21:43.343104 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:43Z","lastTransitionTime":"2025-09-30T12:21:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:43 crc kubenswrapper[5002]: I0930 12:21:43.446309 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:43 crc kubenswrapper[5002]: I0930 12:21:43.446356 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:43 crc kubenswrapper[5002]: I0930 12:21:43.446367 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:43 crc kubenswrapper[5002]: I0930 12:21:43.446384 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:43 crc kubenswrapper[5002]: I0930 12:21:43.446396 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:43Z","lastTransitionTime":"2025-09-30T12:21:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 12:21:43 crc kubenswrapper[5002]: I0930 12:21:43.548580 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:43 crc kubenswrapper[5002]: I0930 12:21:43.548638 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:43 crc kubenswrapper[5002]: I0930 12:21:43.548653 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:43 crc kubenswrapper[5002]: I0930 12:21:43.548675 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:43 crc kubenswrapper[5002]: I0930 12:21:43.548691 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:43Z","lastTransitionTime":"2025-09-30T12:21:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:43 crc kubenswrapper[5002]: I0930 12:21:43.651587 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:43 crc kubenswrapper[5002]: I0930 12:21:43.651652 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:43 crc kubenswrapper[5002]: I0930 12:21:43.651669 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:43 crc kubenswrapper[5002]: I0930 12:21:43.651694 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:43 crc kubenswrapper[5002]: I0930 12:21:43.651711 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:43Z","lastTransitionTime":"2025-09-30T12:21:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:43 crc kubenswrapper[5002]: I0930 12:21:43.675441 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-dj2ln" Sep 30 12:21:43 crc kubenswrapper[5002]: E0930 12:21:43.676160 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-dj2ln" podUID="68756e8a-d882-403f-acd7-2c41fce4446f" Sep 30 12:21:43 crc kubenswrapper[5002]: I0930 12:21:43.676748 5002 scope.go:117] "RemoveContainer" containerID="4e8af62b8de35a0c80cb0fafe3fc81375703e08d812d830cc738ee9bf7c102ee" Sep 30 12:21:43 crc kubenswrapper[5002]: I0930 12:21:43.754309 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:43 crc kubenswrapper[5002]: I0930 12:21:43.754357 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:43 crc kubenswrapper[5002]: I0930 12:21:43.754370 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:43 crc kubenswrapper[5002]: I0930 12:21:43.754393 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:43 crc kubenswrapper[5002]: I0930 12:21:43.754408 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:43Z","lastTransitionTime":"2025-09-30T12:21:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:43 crc kubenswrapper[5002]: I0930 12:21:43.857551 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:43 crc kubenswrapper[5002]: I0930 12:21:43.857627 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:43 crc kubenswrapper[5002]: I0930 12:21:43.857647 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:43 crc kubenswrapper[5002]: I0930 12:21:43.857673 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:43 crc kubenswrapper[5002]: I0930 12:21:43.857691 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:43Z","lastTransitionTime":"2025-09-30T12:21:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 12:21:43 crc kubenswrapper[5002]: I0930 12:21:43.960299 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:43 crc kubenswrapper[5002]: I0930 12:21:43.960393 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:43 crc kubenswrapper[5002]: I0930 12:21:43.960418 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:43 crc kubenswrapper[5002]: I0930 12:21:43.960443 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:43 crc kubenswrapper[5002]: I0930 12:21:43.960462 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:43Z","lastTransitionTime":"2025-09-30T12:21:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:44 crc kubenswrapper[5002]: I0930 12:21:44.062750 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:44 crc kubenswrapper[5002]: I0930 12:21:44.062799 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:44 crc kubenswrapper[5002]: I0930 12:21:44.062808 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:44 crc kubenswrapper[5002]: I0930 12:21:44.062822 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:44 crc kubenswrapper[5002]: I0930 12:21:44.062833 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:44Z","lastTransitionTime":"2025-09-30T12:21:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:44 crc kubenswrapper[5002]: I0930 12:21:44.165916 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:44 crc kubenswrapper[5002]: I0930 12:21:44.165971 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:44 crc kubenswrapper[5002]: I0930 12:21:44.165981 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:44 crc kubenswrapper[5002]: I0930 12:21:44.165999 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:44 crc kubenswrapper[5002]: I0930 12:21:44.166012 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:44Z","lastTransitionTime":"2025-09-30T12:21:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 12:21:44 crc kubenswrapper[5002]: I0930 12:21:44.268368 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:44 crc kubenswrapper[5002]: I0930 12:21:44.268420 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:44 crc kubenswrapper[5002]: I0930 12:21:44.268451 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:44 crc kubenswrapper[5002]: I0930 12:21:44.268486 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:44 crc kubenswrapper[5002]: I0930 12:21:44.268498 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:44Z","lastTransitionTime":"2025-09-30T12:21:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:44 crc kubenswrapper[5002]: I0930 12:21:44.271842 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-4pvsr_7095aa7a-d067-4977-bdc5-3a45a52a6a39/ovnkube-controller/2.log" Sep 30 12:21:44 crc kubenswrapper[5002]: I0930 12:21:44.275049 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4pvsr" event={"ID":"7095aa7a-d067-4977-bdc5-3a45a52a6a39","Type":"ContainerStarted","Data":"9a9a8ff35878c8516cf03a6425a099adbb9e94d1feae510064c59f89b051fcb5"} Sep 30 12:21:44 crc kubenswrapper[5002]: I0930 12:21:44.275604 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-4pvsr" Sep 30 12:21:44 crc kubenswrapper[5002]: I0930 12:21:44.289367 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"90d5183f-90e1-421f-bd64-fd7f05605586\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d2e0f074c0f31a5432cea24faa4f95c276f59cdf48bca8ceecd065b4cdff5d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://902cc950dc462d86c9de62cac5d04e99aa3240952f43e8a5f07884e70ff84b56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://047c48f1e175cddc7dc981a8d9027783f4a90c21cb444050771de50fb02e7a9e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f45ac789dc45e907d68fe54ec7bc2ea72dbcc5f46cd55c113c014203d5e709d9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:44Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:44 crc kubenswrapper[5002]: I0930 12:21:44.298355 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-ls6n9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2ebe21b2-eb85-4b30-b911-78f6619d07f9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e83e79da4b7b8f89a61048c7583c9ff7ccb7ac910744dfbb572c189c5676741\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-frsmm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase
\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:52Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-ls6n9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:44Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:44 crc kubenswrapper[5002]: I0930 12:21:44.308075 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:44 crc kubenswrapper[5002]: I0930 12:21:44.308119 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:44 crc kubenswrapper[5002]: I0930 12:21:44.308130 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:44 crc kubenswrapper[5002]: I0930 12:21:44.308145 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:44 crc kubenswrapper[5002]: I0930 12:21:44.308155 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:44Z","lastTransitionTime":"2025-09-30T12:21:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:44 crc kubenswrapper[5002]: I0930 12:21:44.310224 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"341a55c6-78d3-4fa2-8f47-b56fd41fa1c1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7bc40360d8d17f580c7ded1e33762aa443998e4202892e2f2effc08b6659a4a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbwzb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6868404a7219f8dbbc4e7541e5f80402c169808daea81b5f94546472cb8e70a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbwzb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:50Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-ncbb5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:44Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:44 crc kubenswrapper[5002]: E0930 12:21:44.323685 5002 kubelet_node_status.go:585] "Error 
updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T12:21:44Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:44Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T12:21:44Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:44Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T12:21:44Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:44Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T12:21:44Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:44Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256
:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"si
zeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":46317936
5},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"2268e3e5-9437-4733-80ec-6085372d1c27\\\",\\\"systemUUID\\\":\\\"2730bf9d-d559-45ca-96f1-192133954467\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:44Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:44 crc kubenswrapper[5002]: I0930 12:21:44.328058 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4pvsr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7095aa7a-d067-4977-bdc5-3a45a52a6a39\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b95267743f525eb5c8712304a1dc7afe4403b2065c0a3b594d95dcba6348170\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://89dbc048c1483376e11a6754384e9ea154767a88f7dfb3463809a2554bb142b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d41b04f928f3b4423bf4aa1babfd6292b749752d31ac57771b36bed503312d3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ceb946c94ca247c05c57001754a73122b1bb98a98d886df9d64b2e4a003a4cf6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfbe7b62a4f0a00896ac4608d70c4ef6dc7675e0312f85ca181fcf1087fe73e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a65c3c1af3f1ee07c2f5350011d1f5eb61dfe0b21ff13d7326373d65ffe603c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a9a8ff35878c8516cf03a6425a099adbb9e94d1
feae510064c59f89b051fcb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4e8af62b8de35a0c80cb0fafe3fc81375703e08d812d830cc738ee9bf7c102ee\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T12:21:17Z\\\",\\\"message\\\":\\\"rc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:17Z is after 2025-08-24T17:21:41Z]\\\\nI0930 12:21:17.550120 6732 transact.go:42] Configuring OVN: [{Op:update Table:Logical_Switch_Port Row:map[addresses:{GoSet:[0a:58:0a:d9:00:5c 10.217.0.92]} options:{GoMap:map[iface-id-ver:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 requested-chassis:crc]} port_security:{GoSet:[0a:58:0a:d9:00:5c 10.217.0.92]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {c94130be-172c-477c-88c4-40cc7eba30fe}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:} {Op:mutate Table:Logical_Switch Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:c94130be-172c-477c-88c4-40cc7eba30fe}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {7e8bb06a-06a5-45bc-a752-26a17d322811}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:} {Op:mutate Table:Port_Group Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:c94130be-172c-477c-88c4-40cc7eba30fe}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == 
{eb8eef51-1a8d-43f9-ae2e-3b2cc00ded\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T12:21:16Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:21:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ce6227a7ac3ced1fbd4814039859e25ff52f314aa8479275f1bfc49b04cf411\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"in
itContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29ad21e0858ff157cc785f60f949fb08082938850870cc2c5198447f1e1d9253\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://29ad21e0858ff157cc785f60f949fb08082938850870cc2c5198447f1e1d9253\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:50Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4pvsr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:44Z is after 2025-08-24T17:21:41Z"
Sep 30 12:21:44 crc kubenswrapper[5002]: I0930 12:21:44.328338 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 12:21:44 crc kubenswrapper[5002]: I0930 12:21:44.328359 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 12:21:44 crc kubenswrapper[5002]: I0930 12:21:44.328367 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 12:21:44 crc kubenswrapper[5002]: I0930 12:21:44.328379 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 12:21:44 crc kubenswrapper[5002]: I0930 12:21:44.328388 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:44Z","lastTransitionTime":"2025-09-30T12:21:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 12:21:44 crc kubenswrapper[5002]: E0930 12:21:44.339950 5002 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T12:21:44Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:44Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T12:21:44Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:44Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T12:21:44Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:44Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T12:21:44Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:44Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"2268e3e5-9437-4733-80ec-6085372d1c27\\\",\\\"systemUUID\\\":\\\"2730bf9d-d559-45ca-96f1-192133954467\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:44Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:44 crc kubenswrapper[5002]: I0930 12:21:44.342113 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://30f55f9a0bcab13420179c62f63cd5e785afe964ac86286450fcd91d7ca0562e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://306dd7840789c5fe2565d819a744a57e330cb1fcc1ab1ca4b3c5ebcfd0361d3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:44Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:44 crc kubenswrapper[5002]: I0930 12:21:44.343565 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:44 crc kubenswrapper[5002]: I0930 12:21:44.343610 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:44 crc kubenswrapper[5002]: I0930 12:21:44.343618 5002 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Sep 30 12:21:44 crc kubenswrapper[5002]: I0930 12:21:44.343632 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:44 crc kubenswrapper[5002]: I0930 12:21:44.343641 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:44Z","lastTransitionTime":"2025-09-30T12:21:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:44 crc kubenswrapper[5002]: E0930 12:21:44.353977 5002 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T12:21:44Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:44Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T12:21:44Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:44Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T12:21:44Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:44Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T12:21:44Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:44Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[… image list byte-identical to the status patch above; elided …],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"2268e3e5-9437-4733-80ec-6085372d1c27\\\",\\\"systemUUID\\\":\\\"2730bf9d-d559-45ca-96f1-192133954467\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:44Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:44 crc kubenswrapper[5002]: I0930 12:21:44.355147 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:44Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:44 crc kubenswrapper[5002]: I0930 12:21:44.357380 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:44 crc kubenswrapper[5002]: I0930 12:21:44.357410 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:44 crc kubenswrapper[5002]: I0930 12:21:44.357420 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:44 crc kubenswrapper[5002]: I0930 12:21:44.357436 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:44 crc kubenswrapper[5002]: I0930 12:21:44.357448 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:44Z","lastTransitionTime":"2025-09-30T12:21:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 12:21:44 crc kubenswrapper[5002]: I0930 12:21:44.365565 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-ttfn8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2bd9f18d-bfb3-4bd7-9a87-242029cd3200\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4cf7d940b7333c33f9092bf1a630d298ae51fee0947c17ef09d902ae5dc1103c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0809a587f4b73c4a5e632e8db2cc0d4a957eded6da9a771cf0b53e4862de54e5\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T12:21:38Z\\\",\\\"message\\\":\\\"2025-09-30T12:20:52+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_59b1707c-38a6-4f61-b1a0-65dcba4fefc7\\\\n2025-09-30T12:20:52+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_59b1707c-38a6-4f61-b1a0-65dcba4fefc7 to /host/opt/cni/bin/\\\\n2025-09-30T12:20:53Z [verbose] multus-daemon started\\\\n2025-09-30T12:20:53Z [verbose] Readiness Indicator file check\\\\n2025-09-30T12:21:38Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:21:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lvf2n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-ttfn8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:44Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:44 crc kubenswrapper[5002]: E0930 12:21:44.368565 5002 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T12:21:44Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:44Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory 
available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{… DiskPressure, PIDPressure, and Ready conditions identical to the status patch above; elided …}],\\\"images\\\":[… image list identical to the status patch above; elided …],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"2268e3e5-9437-4733-80ec-6085372d1c27\\\",\\\"systemUUID\\\":\\\"2730bf9d-d559-45ca-96f1-192133954467\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:44Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:44 crc kubenswrapper[5002]: I0930 12:21:44.374244 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:44 crc kubenswrapper[5002]: I0930 12:21:44.374281 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:44 crc kubenswrapper[5002]: I0930 12:21:44.374290 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:44 crc kubenswrapper[5002]: I0930 12:21:44.374302 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:44 crc kubenswrapper[5002]: I0930 12:21:44.374310 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:44Z","lastTransitionTime":"2025-09-30T12:21:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 12:21:44 crc kubenswrapper[5002]: I0930 12:21:44.378800 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-p45pn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d0334cb8-cf38-4e50-80e8-2b81d8d46ae7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca035599120eed38b0abc1893076e4d1ecab6b1ce53605f53ff30fa2782b63de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2nvm4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:49Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-p45pn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:44Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:44 crc kubenswrapper[5002]: E0930 12:21:44.385553 5002 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T12:21:44Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:44Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory 
available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{… DiskPressure, PIDPressure, and Ready conditions identical to the status patch above; elided …}],\\\"images\\\":[… image list identical to the status patch above; elided …],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"2268e3e5-9437-4733-80ec-6085372d1c27\\\",\\\"systemUUID\\\":\\\"2730bf9d-d559-45ca-96f1-192133954467\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:44Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:44 crc kubenswrapper[5002]: E0930 12:21:44.385663 5002 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Sep 30 12:21:44 crc kubenswrapper[5002]: I0930 12:21:44.387099 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:44 crc kubenswrapper[5002]: I0930 12:21:44.387134 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:44 crc kubenswrapper[5002]: I0930 12:21:44.387142 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:44 crc kubenswrapper[5002]: I0930 12:21:44.387155 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:44 crc kubenswrapper[5002]: I0930 12:21:44.387163 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:44Z","lastTransitionTime":"2025-09-30T12:21:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 12:21:44 crc kubenswrapper[5002]: I0930 12:21:44.395540 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-2lqq2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7b7df259-4156-484c-bedd-543ca42f2970\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3e10fed923eacb0c5409835ec689ec74b4ef68b770774b3fe03e05db05d63c04\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6865c45580d6dc6acaf6dede9483213728b45f602a6df3a6d9264bbec520db66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6865c45580d6dc6acaf6dede9483213728b45f602a6df3a6d9264bbec520db66\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://042d2758098735539d73ee03bcaf5bfaaa765ef2337975341c63511bff33e4ec\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://042d2758098735539d73ee03bcaf5bfaaa765ef2337975341c63511bff33e4ec\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://037e6837e485e2662c487cea7b28bd28b85c3aa2dc698edd3d16b4269907dd83\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://037e6837e485e2662c487cea7b28bd28b85c3aa2dc698edd3d16b4269907dd83\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2a451f8cf7baf508ad6ceda7f19f3117804e5698e678bedf8e4187f60847aaac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2a451f8cf7baf508ad6ceda7f19f3117804e5698e678bedf8e4187f60847aaac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://edf6916be3680c65376e28fb893a869599eb07d870ac5337a38fed83b4dbf28d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://edf6916be3680c65376e28fb893a869599eb07d870ac5337a38fed83b4dbf28d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7019df241ab90e98201c474287f02919d99bca60c432a8f61ac68d29ce53bed8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7019df241ab90e98201c474287f02919d99bca60c432a8f61ac68d29ce53bed8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-2lqq2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:44Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:44 crc kubenswrapper[5002]: I0930 12:21:44.407045 5002 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-multus/network-metrics-daemon-dj2ln" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"68756e8a-d882-403f-acd7-2c41fce4446f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jdgr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jdgr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:21:04Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-dj2ln\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:44Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:44 crc kubenswrapper[5002]: I0930 12:21:44.416910 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"be2f35cf-d5f8-40aa-ae79-11e075053735\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b282a6a59ec67140cc73df77f9505388fb4ff97dfdedf8394eeb46d65a2bda8a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1f6f78afdbf3b3bd7c28515e9ff9ad08cdccd4f4f44106f8aa5375638b3e8ec9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1f6f78afdbf3b3bd7c28515e9ff9ad08cdccd4f4f44106f8aa5375638b3e8ec9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:26Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:44Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:44 crc kubenswrapper[5002]: I0930 12:21:44.429844 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"98317299-6ce9-4196-a387-becb81461d6a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6057b9d238b4d4dd1329f0ffdbfd88f2f8869c06e5eb681041229a15dd4a08dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://96f9b5d0a287ad67a6f1008d06b0d371a522722280353ed9001f81198b9295ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd0dba52ee6bddc02187198fe6c409abf3d336df93b558b27fc7f76e7ec8f2b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://84c3302e7daef1722f84b63d532dabe8e9f8cf5679478df9b92b73273b3a3000\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://84c3302e7daef1722f84b63d532dabe8e9f8cf5679478df9b92b73273b3a3000\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:27Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:26Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:44Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:44 crc kubenswrapper[5002]: I0930 12:21:44.441262 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:44Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:44 crc kubenswrapper[5002]: I0930 12:21:44.452430 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97f2c4d225c4e4f2f04337915618c52ced7280fe45dc735fd9b1aca03f95f7f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:44Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:44 crc kubenswrapper[5002]: I0930 12:21:44.463914 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:44Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:44 crc kubenswrapper[5002]: I0930 12:21:44.475890 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hvvbg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c3b6a7b0-1c44-43ee-a789-36bb0cd51b87\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cb569d449d2d8d49ee8d62c4815b625481cd5ddd882a135dac7465fe63fbc67e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:21:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ffrqq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4537e7bf982dba689bd81821609884ac5f2530e6efa7d48cd65e05ff64d2ec6b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:21:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ffrqq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:21:02Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-hvvbg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:44Z is after 2025-08-24T17:21:41Z" Sep 30 
12:21:44 crc kubenswrapper[5002]: I0930 12:21:44.490025 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:44 crc kubenswrapper[5002]: I0930 12:21:44.490057 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:44 crc kubenswrapper[5002]: I0930 12:21:44.490065 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:44 crc kubenswrapper[5002]: I0930 12:21:44.490078 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:44 crc kubenswrapper[5002]: I0930 12:21:44.490086 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:44Z","lastTransitionTime":"2025-09-30T12:21:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:44 crc kubenswrapper[5002]: I0930 12:21:44.493228 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1d4eefd-8292-45a1-af4b-4a4906cccd2f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21a70de7ac636795f52b3bdd2a2b938494cabcf80e1fb9b321a309f4719edf4e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://240fb1d0e11ae7b3459121ba32094ddfa79dc71a1180befebcaf594859a63d86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b9009
2272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://91de7b5c3b52d03b05c151b66857b5af5f3b9d228f3f1af32a7f504225e1e739\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4cb1aca4e313653c2d83c0d4fb8dfab1e6f8c8220c806cbcdcf154f2d9ab6e29\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ef59c1ec617a61effd91da12779f13866a50fe611d078330f0275e8d6ef1c27e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3a814062d9554255b768b26ad452946c64836707ac0e68c9e3f107dc587c0ccb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\
":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3a814062d9554255b768b26ad452946c64836707ac0e68c9e3f107dc587c0ccb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://74db2c969d2f9049fd8e8130a9fd50b312f3fce049ab0e814390dbff3b0cb596\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://74db2c969d2f9049fd8e8130a9fd50b312f3fce049ab0e814390dbff3b0cb596\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://a367821c9b4b8799fb789ca39d487c519925da11ebb80e94a0123e02cf78964e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a367821c9b4b8799fb789ca39d487c519925da11ebb80e94a0123e02cf78964e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:26Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:44Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:44 crc kubenswrapper[5002]: I0930 12:21:44.504956 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1fe2fccc-790d-41b7-a322-0f99dbd9b3e2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a46e01897629261ad42fc3fa2cc7fdf56a2a020864a1088d9e58edf61aac296e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://017ebbf00a59d33f36e3515dd280bc0c4363da8b3e621339185ebdb3c8728148\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ee0dc371a43f3780e1178e29ac491c7a61786fccb37527dd2f21d800adbbdae\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ba33674850f0b121ea405e37924fcc085b3a3625e87432d9df38b2420920bf45\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://49c139b2c0ae558d108d47a8452c2a8348fc164761d468e1b9b98ff2c7407f20\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"message\\\":\\\"le observer\\\\nW0930 12:20:46.369838 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0930 12:20:46.370037 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 12:20:46.371113 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1995022772/tls.crt::/tmp/serving-cert-1995022772/tls.key\\\\\\\"\\\\nI0930 12:20:46.549991 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0930 12:20:46.558658 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0930 12:20:46.558692 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0930 12:20:46.558723 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0930 12:20:46.558735 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0930 12:20:46.572632 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0930 12:20:46.572689 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 12:20:46.572707 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 12:20:46.572718 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0930 12:20:46.572725 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0930 12:20:46.572737 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0930 12:20:46.572744 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0930 12:20:46.573045 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0930 12:20:46.574462 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:40Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:21:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b99e73bb34b117cdb12e2475fc8e7d0ffdcb7fc44a000282e7776473eca209ea\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://24b1bc36a80f3925846e76f49c3347f1e2f14555096b3a514b06c6eb2e8fe02c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://24b1bc36a80f3925846e76f49c3347f1e2f14555096b3a514b06c6eb2e8fe02c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:44Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:44 crc kubenswrapper[5002]: I0930 12:21:44.518163 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://28d9568103a5257388af7a54e676246ab6ee732abd4516bd06e2cd7471f0ec08\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:44Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:44 crc kubenswrapper[5002]: I0930 12:21:44.591886 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:44 crc kubenswrapper[5002]: I0930 12:21:44.591938 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:44 crc kubenswrapper[5002]: I0930 12:21:44.591951 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:44 crc kubenswrapper[5002]: I0930 12:21:44.591970 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:44 crc kubenswrapper[5002]: I0930 12:21:44.591982 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:44Z","lastTransitionTime":"2025-09-30T12:21:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:44 crc kubenswrapper[5002]: I0930 12:21:44.675399 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 12:21:44 crc kubenswrapper[5002]: I0930 12:21:44.675410 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 12:21:44 crc kubenswrapper[5002]: I0930 12:21:44.675426 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 12:21:44 crc kubenswrapper[5002]: E0930 12:21:44.675721 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 12:21:44 crc kubenswrapper[5002]: E0930 12:21:44.675796 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 12:21:44 crc kubenswrapper[5002]: E0930 12:21:44.675885 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 12:21:44 crc kubenswrapper[5002]: I0930 12:21:44.694186 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:44 crc kubenswrapper[5002]: I0930 12:21:44.694234 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:44 crc kubenswrapper[5002]: I0930 12:21:44.694248 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:44 crc kubenswrapper[5002]: I0930 12:21:44.694270 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:44 crc kubenswrapper[5002]: I0930 12:21:44.694286 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:44Z","lastTransitionTime":"2025-09-30T12:21:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 12:21:44 crc kubenswrapper[5002]: I0930 12:21:44.796273 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:44 crc kubenswrapper[5002]: I0930 12:21:44.796312 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:44 crc kubenswrapper[5002]: I0930 12:21:44.796324 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:44 crc kubenswrapper[5002]: I0930 12:21:44.796338 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:44 crc kubenswrapper[5002]: I0930 12:21:44.796348 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:44Z","lastTransitionTime":"2025-09-30T12:21:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:44 crc kubenswrapper[5002]: I0930 12:21:44.899127 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:44 crc kubenswrapper[5002]: I0930 12:21:44.899208 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:44 crc kubenswrapper[5002]: I0930 12:21:44.899222 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:44 crc kubenswrapper[5002]: I0930 12:21:44.899241 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:44 crc kubenswrapper[5002]: I0930 12:21:44.899252 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:44Z","lastTransitionTime":"2025-09-30T12:21:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:45 crc kubenswrapper[5002]: I0930 12:21:45.002888 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:45 crc kubenswrapper[5002]: I0930 12:21:45.002970 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:45 crc kubenswrapper[5002]: I0930 12:21:45.002996 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:45 crc kubenswrapper[5002]: I0930 12:21:45.003028 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:45 crc kubenswrapper[5002]: I0930 12:21:45.003052 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:45Z","lastTransitionTime":"2025-09-30T12:21:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 12:21:45 crc kubenswrapper[5002]: I0930 12:21:45.105874 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:45 crc kubenswrapper[5002]: I0930 12:21:45.105929 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:45 crc kubenswrapper[5002]: I0930 12:21:45.105947 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:45 crc kubenswrapper[5002]: I0930 12:21:45.105968 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:45 crc kubenswrapper[5002]: I0930 12:21:45.105984 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:45Z","lastTransitionTime":"2025-09-30T12:21:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:45 crc kubenswrapper[5002]: I0930 12:21:45.208556 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:45 crc kubenswrapper[5002]: I0930 12:21:45.208604 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:45 crc kubenswrapper[5002]: I0930 12:21:45.208616 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:45 crc kubenswrapper[5002]: I0930 12:21:45.208635 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:45 crc kubenswrapper[5002]: I0930 12:21:45.208648 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:45Z","lastTransitionTime":"2025-09-30T12:21:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 12:21:45 crc kubenswrapper[5002]: I0930 12:21:45.280646 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-4pvsr_7095aa7a-d067-4977-bdc5-3a45a52a6a39/ovnkube-controller/3.log" Sep 30 12:21:45 crc kubenswrapper[5002]: I0930 12:21:45.282017 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-4pvsr_7095aa7a-d067-4977-bdc5-3a45a52a6a39/ovnkube-controller/2.log" Sep 30 12:21:45 crc kubenswrapper[5002]: I0930 12:21:45.285953 5002 generic.go:334] "Generic (PLEG): container finished" podID="7095aa7a-d067-4977-bdc5-3a45a52a6a39" containerID="9a9a8ff35878c8516cf03a6425a099adbb9e94d1feae510064c59f89b051fcb5" exitCode=1 Sep 30 12:21:45 crc kubenswrapper[5002]: I0930 12:21:45.286041 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4pvsr" event={"ID":"7095aa7a-d067-4977-bdc5-3a45a52a6a39","Type":"ContainerDied","Data":"9a9a8ff35878c8516cf03a6425a099adbb9e94d1feae510064c59f89b051fcb5"} Sep 30 12:21:45 crc kubenswrapper[5002]: I0930 12:21:45.286113 5002 scope.go:117] "RemoveContainer" containerID="4e8af62b8de35a0c80cb0fafe3fc81375703e08d812d830cc738ee9bf7c102ee" Sep 30 12:21:45 crc kubenswrapper[5002]: I0930 12:21:45.286698 5002 scope.go:117] "RemoveContainer" containerID="9a9a8ff35878c8516cf03a6425a099adbb9e94d1feae510064c59f89b051fcb5" Sep 30 12:21:45 crc kubenswrapper[5002]: E0930 12:21:45.286915 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-4pvsr_openshift-ovn-kubernetes(7095aa7a-d067-4977-bdc5-3a45a52a6a39)\"" pod="openshift-ovn-kubernetes/ovnkube-node-4pvsr" podUID="7095aa7a-d067-4977-bdc5-3a45a52a6a39" Sep 30 12:21:45 crc kubenswrapper[5002]: I0930 12:21:45.309197 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:45Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:45 crc kubenswrapper[5002]: I0930 12:21:45.311621 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:45 crc kubenswrapper[5002]: I0930 12:21:45.311674 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:45 crc kubenswrapper[5002]: I0930 12:21:45.311685 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:45 crc kubenswrapper[5002]: I0930 12:21:45.311741 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:45 crc kubenswrapper[5002]: I0930 12:21:45.311755 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:45Z","lastTransitionTime":"2025-09-30T12:21:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 12:21:45 crc kubenswrapper[5002]: I0930 12:21:45.324216 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-ttfn8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2bd9f18d-bfb3-4bd7-9a87-242029cd3200\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4cf7d940b7333c33f9092bf1a630d298ae51fee0947c17ef09d902ae5dc1103c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0809a587f4b73c4a5e632e8db2cc0d4a957eded6da9a771cf0b53e4862de54e5\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T12:21:38Z\\\",\\\"message\\\":\\\"2025-09-30T12:20:52+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_59b1707c-38a6-4f61-b1a0-65dcba4fefc7\\\\n2025-09-30T12:20:52+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_59b1707c-38a6-4f61-b1a0-65dcba4fefc7 to /host/opt/cni/bin/\\\\n2025-09-30T12:20:53Z [verbose] multus-daemon started\\\\n2025-09-30T12:20:53Z [verbose] Readiness Indicator file check\\\\n2025-09-30T12:21:38Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:21:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lvf2n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-ttfn8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:45Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:45 crc kubenswrapper[5002]: I0930 12:21:45.343159 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"341a55c6-78d3-4fa2-8f47-b56fd41fa1c1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7bc40360d8d17f580c7ded1e33762aa443998e4202892e2f2effc08b6659a4a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbwzb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6868404a7219f8dbbc4e7541e5f80402c169808daea81b5f94546472cb8e70a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbwzb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:50Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-ncbb5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:45Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:45 crc kubenswrapper[5002]: I0930 12:21:45.364968 5002 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4pvsr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7095aa7a-d067-4977-bdc5-3a45a52a6a39\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b95267743f525eb5c8712304a1dc7afe4403b2065c0a3b594d95dcba6348170\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://89dbc048c1483376e11a6754384e9ea154767a88f7dfb3463809a2554bb142b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d41b04f928f3b4423bf4aa1babfd6292b749752d31ac57771b36bed503312d3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0
-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ceb946c94ca247c05c57001754a73122b1bb98a98d886df9d64b2e4a003a4cf6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfbe7b62a4f0a00896ac4608d70c4ef6dc7675e0312f85ca181fcf1087fe73e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a65c3c1af3f1ee07c2f5350011d1f5eb61dfe0b21ff13d7326373d65ffe603c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\
\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a9a8ff35878c8516cf03a6425a099adbb9e94d1feae510064c59f89b051fcb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4e8af62b8de35a0c80cb0fafe3fc81375703e08d812d830cc738ee9bf7c102ee\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T12:21:17Z\\\",\\\"message\\\":\\\"rc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:17Z is after 2025-08-24T17:21:41Z]\\\\nI0930 12:21:17.550120 6732 transact.go:42] Configuring OVN: [{Op:update Table:Logical_Switch_Port Row:map[addresses:{GoSet:[0a:58:0a:d9:00:5c 10.217.0.92]} options:{GoMap:map[iface-id-ver:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 requested-chassis:crc]} port_security:{GoSet:[0a:58:0a:d9:00:5c 10.217.0.92]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {c94130be-172c-477c-88c4-40cc7eba30fe}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:} {Op:mutate Table:Logical_Switch Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:c94130be-172c-477c-88c4-40cc7eba30fe}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {7e8bb06a-06a5-45bc-a752-26a17d322811}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:} {Op:mutate Table:Port_Group Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:c94130be-172c-477c-88c4-40cc7eba30fe}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == 
{eb8eef51-1a8d-43f9-ae2e-3b2cc00ded\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T12:21:16Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9a9a8ff35878c8516cf03a6425a099adbb9e94d1feae510064c59f89b051fcb5\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T12:21:44Z\\\",\\\"message\\\":\\\"Ds:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-machine-api/machine-api-operator-machine-webhook\\\\\\\"}, Opts:services.LBOpts{Reject:false, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}, built lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-machine-api/machine-api-operator-machine-webhook_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-machine-api/machine-api-operator-machine-webhook\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.4.250\\\\\\\", Port:443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI0930 12:21:44.676107 7084 obj_retry.go:420] Function iterateRetryResources for *v1.Pod ended (in 165.305µs)\\\\nF0930 12:21:44.676014 7084 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to 
create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T12:21:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ce6227a7ac3ced1fbd4814039859e25ff52f314aa8479275f1bfc49b04cf411\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29ad21e0858ff157cc785f60f949fb08082938850870cc2c5198447f1e1d9253\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d
1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://29ad21e0858ff157cc785f60f949fb08082938850870cc2c5198447f1e1d9253\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:50Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4pvsr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:45Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:45 crc kubenswrapper[5002]: I0930 12:21:45.385370 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://30f55f9a0bcab13420179c62f63cd5e785afe964ac86286450fcd91d7ca0562e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://306dd7840789c5fe2565d819a744a57e330cb1fcc1ab1ca4b3c5ebcfd0361d3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"qu
ay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:45Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:45 crc kubenswrapper[5002]: I0930 12:21:45.406548 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"98317299-6ce9-4196-a387-becb81461d6a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6057b9d238b4d4dd1329f0ffdbfd88f2f8869c06e5eb681041229a15dd4a08dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://96f9b5d0a287ad67a6f1008d06b0d371a522722280353ed9001f81198b9295ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restart
Count\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd0dba52ee6bddc02187198fe6c409abf3d336df93b558b27fc7f76e7ec8f2b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://84c3302e7daef1722f84b63d532dabe8e9f8cf5679478df9b92b73273b3a3000\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://84c3302e7daef1722f84b63d532dabe8e9f8cf5679478df9b92b73273b3a3000\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:27Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:26Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:45Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:45 crc kubenswrapper[5002]: I0930 12:21:45.414369 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:45 crc kubenswrapper[5002]: I0930 12:21:45.414508 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:45 crc kubenswrapper[5002]: I0930 12:21:45.414528 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:45 crc kubenswrapper[5002]: I0930 12:21:45.414554 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:45 crc kubenswrapper[5002]: I0930 12:21:45.414584 5002 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:45Z","lastTransitionTime":"2025-09-30T12:21:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:45 crc kubenswrapper[5002]: I0930 12:21:45.424012 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:45Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:45 crc kubenswrapper[5002]: I0930 12:21:45.443029 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-p45pn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d0334cb8-cf38-4e50-80e8-2b81d8d46ae7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca035599120eed38b0abc1893076e4d1ecab6b1ce53605f53ff30fa2782b63de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2nvm4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:49Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-p45pn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-09-30T12:21:45Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:45 crc kubenswrapper[5002]: I0930 12:21:45.464166 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-2lqq2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7b7df259-4156-484c-bedd-543ca42f2970\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3e10fed923eacb0c5409835ec689ec74b4ef68b770774b3fe03e05db05d63c04\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6865c45580d6dc6acaf6dede9483213728b45f602a6df3a6d9264bbec520db66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6865c45580d6dc6acaf6dede9483213728b45f602a6df3a6d9264bbec520db66\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://042d2758098735539d73ee03bcaf5bfaaa765ef2337975341c
63511bff33e4ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://042d2758098735539d73ee03bcaf5bfaaa765ef2337975341c63511bff33e4ec\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://037e6837e485e2662c487cea7b28bd28b85c3aa2dc698edd3d16b4269907dd83\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://037e6837e485e2662c487cea7b28bd28b85c3aa2dc698edd3d16b4269907dd83\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2a451f8cf7baf508ad6ceda7f19f3117804e5698e678bedf8e4187f60847aaac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2a451f8cf7baf508ad6ceda7f19f3117804e5698e678bedf8e4187f60847aaac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-
copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://edf6916be3680c65376e28fb893a869599eb07d870ac5337a38fed83b4dbf28d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://edf6916be3680c65376e28fb893a869599eb07d870ac5337a38fed83b4dbf28d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7019df241ab90e98201c474287f02919d99bca60c432a8f61ac68d29ce53bed8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7019df241ab90e98201c474287f02919d99bca60c432a8f61ac68d29ce53bed8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-2lqq2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:45Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:45 crc kubenswrapper[5002]: I0930 12:21:45.484576 5002 status_manager.go:875] "Failed to update status for 
pod" pod="openshift-multus/network-metrics-daemon-dj2ln" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"68756e8a-d882-403f-acd7-2c41fce4446f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jdgr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jdgr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:21:04Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-dj2ln\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:45Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:45 crc kubenswrapper[5002]: I0930 12:21:45.497675 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"be2f35cf-d5f8-40aa-ae79-11e075053735\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b282a6a59ec67140cc73df77f9505388fb4ff97dfdedf8394eeb46d65a2bda8a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1f6f78afdbf3b3bd7c28515e9ff9ad08cdccd4f4f44106f8aa5375638b3e8ec9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1f6f78afdbf3b3bd7c28515e9ff9ad08cdccd4f4f44106f8aa5375638b3e8ec9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:26Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:45Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:45 crc kubenswrapper[5002]: I0930 12:21:45.515742 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1fe2fccc-790d-41b7-a322-0f99dbd9b3e2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a46e01897629261ad42fc3fa2cc7fdf56a2a020864a1088d9e58edf61aac296e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://017ebbf00a59d33f36e3515dd280bc0c4363da8b3e621339185ebdb3c8728148\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ee0dc371a43f3780e1178e29ac491c7a61786fccb37527dd2f21d800adbbdae\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ba33674850f0b121ea405e37924fcc085b3a3625e87432d9df38b2420920bf45\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://49c139b2c0ae558d108d47a8452c2a8348fc164761d468e1b9b98ff2c7407f20\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"message\\\":\\\"le observer\\\\nW0930 12:20:46.369838 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0930 12:20:46.370037 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 12:20:46.371113 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1995022772/tls.crt::/tmp/serving-cert-1995022772/tls.key\\\\\\\"\\\\nI0930 12:20:46.549991 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0930 12:20:46.558658 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0930 12:20:46.558692 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0930 12:20:46.558723 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0930 12:20:46.558735 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0930 12:20:46.572632 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0930 12:20:46.572689 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 12:20:46.572707 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 12:20:46.572718 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0930 12:20:46.572725 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0930 12:20:46.572737 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0930 12:20:46.572744 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0930 12:20:46.573045 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0930 12:20:46.574462 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:40Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:21:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b99e73bb34b117cdb12e2475fc8e7d0ffdcb7fc44a000282e7776473eca209ea\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://24b1bc36a80f3925846e76f49c3347f1e2f14555096b3a514b06c6eb2e8fe02c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://24b1bc36a80f3925846e76f49c3347f1e2f14555096b3a514b06c6eb2e8fe02c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:45Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:45 crc kubenswrapper[5002]: I0930 12:21:45.517114 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:45 crc kubenswrapper[5002]: I0930 12:21:45.517203 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:45 crc kubenswrapper[5002]: I0930 12:21:45.517222 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:45 crc kubenswrapper[5002]: I0930 12:21:45.517246 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:45 crc kubenswrapper[5002]: I0930 12:21:45.517264 5002 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:45Z","lastTransitionTime":"2025-09-30T12:21:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:45 crc kubenswrapper[5002]: I0930 12:21:45.532154 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://28d9568103a5257388af7a54e676246ab6ee732abd4516bd06e2cd7471f0ec08\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:45Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:45 crc kubenswrapper[5002]: I0930 12:21:45.546935 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97f2c4d225c4e4f2f04337915618c52ced7280fe45dc735fd9b1aca03f95f7f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:45Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:45 crc kubenswrapper[5002]: I0930 12:21:45.567388 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:45Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:45 crc kubenswrapper[5002]: I0930 12:21:45.581158 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hvvbg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c3b6a7b0-1c44-43ee-a789-36bb0cd51b87\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cb569d449d2d8d49ee8d62c4815b625481cd5ddd882a135dac7465fe63fbc67e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:21:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ffrqq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4537e7bf982dba689bd81821609884ac5f2530e6efa7d48cd65e05ff64d2ec6b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2
099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:21:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ffrqq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:21:02Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-hvvbg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:45Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:45 crc kubenswrapper[5002]: I0930 12:21:45.599659 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1d4eefd-8292-45a1-af4b-4a4906cccd2f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21a70de7ac636795f52b3bdd2a2b938494cabcf80e1fb9b321a309f4719edf4e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://240fb1d0e11ae7b3459121ba32094ddfa79dc71a1180befebcaf594859a63d86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imag
eID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://91de7b5c3b52d03b05c151b66857b5af5f3b9d228f3f1af32a7f504225e1e739\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4cb1aca4e313653c2d83c0d4fb8dfab1e6f8c8220c806cbcdcf154f2d9ab6e29\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ef59c1ec617a61effd91da12779f13866a50fe611d078330f0275e8d6ef1c27e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3a814062d9554255b768b26ad452946c64836707ac0e68c9e3f107dc587c0ccb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha2
56:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3a814062d9554255b768b26ad452946c64836707ac0e68c9e3f107dc587c0ccb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://74db2c969d2f9049fd8e8130a9fd50b312f3fce049ab0e814390dbff3b0cb596\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://74db2c969d2f9049fd8e8130a9fd50b312f3fce049ab0e814390dbff3b0cb596\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://a367821c9b4b8799fb789ca39d487c519925da11ebb80e94a0123e02cf78964e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a367821c9b4b8799fb789ca39d487c519925da11ebb80e94a0123e02cf78964e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:26Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:45Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:45 crc kubenswrapper[5002]: I0930 12:21:45.609615 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-ls6n9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2ebe21b2-eb85-4b30-b911-78f6619d07f9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e83e79da4b7b8f89a61048c7583c9ff7ccb7ac910744dfbb572c189c5676741\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-frsmm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:52Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-ls6n9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:45Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:45 crc kubenswrapper[5002]: I0930 12:21:45.620295 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:45 crc kubenswrapper[5002]: I0930 12:21:45.620345 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:45 crc kubenswrapper[5002]: I0930 12:21:45.620357 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:45 crc kubenswrapper[5002]: I0930 12:21:45.620377 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:45 crc kubenswrapper[5002]: I0930 12:21:45.620390 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:45Z","lastTransitionTime":"2025-09-30T12:21:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:45 crc kubenswrapper[5002]: I0930 12:21:45.626018 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"90d5183f-90e1-421f-bd64-fd7f05605586\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d2e0f074c0f31a5432cea24faa4f95c276f59cdf48bca8ceecd065b4cdff5d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://902cc950dc462d86c9de62cac5d04e99aa3240952f43e8a5f07884e70ff84b56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://047c48f1e175cddc7dc981a8d9027783f4a90c21cb444050771de50fb02e7a9e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:28
Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f45ac789dc45e907d68fe54ec7bc2ea72dbcc5f46cd55c113c014203d5e709d9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:45Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:45 crc kubenswrapper[5002]: I0930 12:21:45.676012 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-dj2ln" Sep 30 12:21:45 crc kubenswrapper[5002]: E0930 12:21:45.676250 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-dj2ln" podUID="68756e8a-d882-403f-acd7-2c41fce4446f" Sep 30 12:21:45 crc kubenswrapper[5002]: I0930 12:21:45.722964 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:45 crc kubenswrapper[5002]: I0930 12:21:45.723029 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:45 crc kubenswrapper[5002]: I0930 12:21:45.723039 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:45 crc kubenswrapper[5002]: I0930 12:21:45.723060 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:45 crc kubenswrapper[5002]: I0930 12:21:45.723078 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:45Z","lastTransitionTime":"2025-09-30T12:21:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 12:21:45 crc kubenswrapper[5002]: I0930 12:21:45.826074 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:45 crc kubenswrapper[5002]: I0930 12:21:45.826146 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:45 crc kubenswrapper[5002]: I0930 12:21:45.826159 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:45 crc kubenswrapper[5002]: I0930 12:21:45.826186 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:45 crc kubenswrapper[5002]: I0930 12:21:45.826205 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:45Z","lastTransitionTime":"2025-09-30T12:21:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:45 crc kubenswrapper[5002]: I0930 12:21:45.929265 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:45 crc kubenswrapper[5002]: I0930 12:21:45.929328 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:45 crc kubenswrapper[5002]: I0930 12:21:45.929339 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:45 crc kubenswrapper[5002]: I0930 12:21:45.929360 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:45 crc kubenswrapper[5002]: I0930 12:21:45.929372 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:45Z","lastTransitionTime":"2025-09-30T12:21:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:46 crc kubenswrapper[5002]: I0930 12:21:46.033997 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:46 crc kubenswrapper[5002]: I0930 12:21:46.034076 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:46 crc kubenswrapper[5002]: I0930 12:21:46.034093 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:46 crc kubenswrapper[5002]: I0930 12:21:46.034243 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:46 crc kubenswrapper[5002]: I0930 12:21:46.034275 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:46Z","lastTransitionTime":"2025-09-30T12:21:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 12:21:46 crc kubenswrapper[5002]: I0930 12:21:46.137430 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:46 crc kubenswrapper[5002]: I0930 12:21:46.137537 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:46 crc kubenswrapper[5002]: I0930 12:21:46.137562 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:46 crc kubenswrapper[5002]: I0930 12:21:46.137588 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:46 crc kubenswrapper[5002]: I0930 12:21:46.137611 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:46Z","lastTransitionTime":"2025-09-30T12:21:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:46 crc kubenswrapper[5002]: I0930 12:21:46.240499 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:46 crc kubenswrapper[5002]: I0930 12:21:46.240558 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:46 crc kubenswrapper[5002]: I0930 12:21:46.240574 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:46 crc kubenswrapper[5002]: I0930 12:21:46.240594 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:46 crc kubenswrapper[5002]: I0930 12:21:46.240609 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:46Z","lastTransitionTime":"2025-09-30T12:21:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 12:21:46 crc kubenswrapper[5002]: I0930 12:21:46.291275 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-4pvsr_7095aa7a-d067-4977-bdc5-3a45a52a6a39/ovnkube-controller/3.log" Sep 30 12:21:46 crc kubenswrapper[5002]: I0930 12:21:46.296308 5002 scope.go:117] "RemoveContainer" containerID="9a9a8ff35878c8516cf03a6425a099adbb9e94d1feae510064c59f89b051fcb5" Sep 30 12:21:46 crc kubenswrapper[5002]: E0930 12:21:46.296628 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-4pvsr_openshift-ovn-kubernetes(7095aa7a-d067-4977-bdc5-3a45a52a6a39)\"" pod="openshift-ovn-kubernetes/ovnkube-node-4pvsr" podUID="7095aa7a-d067-4977-bdc5-3a45a52a6a39" Sep 30 12:21:46 crc kubenswrapper[5002]: I0930 12:21:46.320054 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1fe2fccc-790d-41b7-a322-0f99dbd9b3e2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a46e01897629261ad42fc3fa2cc7fdf56a2a020864a1088d9e58edf61aac296e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://017ebbf00a59d33f36e3515dd280bc0c4363da8b3e621339185ebdb3c8728148\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\
\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ee0dc371a43f3780e1178e29ac491c7a61786fccb37527dd2f21d800adbbdae\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ba33674850f0b121ea405e37924fcc085b3a3625e87432d9df38b2420920bf45\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://49c139b2c0ae558d108d47a8452c2a8348fc164761d468e1b9b98ff2c7407f20\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"message\\\":\\\"le observer\\\\nW0930 12:20:46.369838 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0930 12:20:46.370037 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 12:20:46.371113 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1995022772/tls.crt::/tmp/serving-cert-1995022772/tls.key\\\\\\\"\\\\nI0930 12:20:46.549991 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0930 12:20:46.558658 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0930 12:20:46.558692 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0930 12:20:46.558723 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0930 12:20:46.558735 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0930 12:20:46.572632 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0930 12:20:46.572689 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 12:20:46.572707 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 12:20:46.572718 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0930 12:20:46.572725 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0930 12:20:46.572737 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0930 12:20:46.572744 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0930 12:20:46.573045 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all 
endpoints registered and discovery information is complete\\\\nF0930 12:20:46.574462 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:40Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:21:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b99e73bb34b117cdb12e2475fc8e7d0ffdcb7fc44a000282e7776473eca209ea\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://24b1bc36a80f3925846e76f49c3347f1e2f14555096b3a514b06c6eb2e8fe02c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://24b1bc36a80f3925846e76f49c3347f1e2f14555096b3a514b06c6eb2e8fe02c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:46Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:46 crc kubenswrapper[5002]: I0930 12:21:46.334631 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://28d9568103a5257388af7a54e676246ab6ee732abd4516bd06e2cd7471f0ec08\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:46Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:46 crc kubenswrapper[5002]: I0930 12:21:46.342410 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:46 crc kubenswrapper[5002]: I0930 12:21:46.342464 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:46 crc kubenswrapper[5002]: I0930 12:21:46.342487 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:46 crc kubenswrapper[5002]: I0930 12:21:46.342503 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:46 crc kubenswrapper[5002]: I0930 12:21:46.342515 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:46Z","lastTransitionTime":"2025-09-30T12:21:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 12:21:46 crc kubenswrapper[5002]: I0930 12:21:46.352502 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97f2c4d225c4e4f2f04337915618c52ced7280fe45dc735fd9b1aca03f95f7f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:46Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:46 crc kubenswrapper[5002]: I0930 12:21:46.365001 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:46Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:46 crc kubenswrapper[5002]: I0930 12:21:46.380588 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hvvbg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c3b6a7b0-1c44-43ee-a789-36bb0cd51b87\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cb569d449d2d8d49ee8d62c4815b625481cd5ddd882a135dac7465fe63fbc67e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:21:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ffrqq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4537e7bf982dba689bd81821609884ac5f2530e6efa7d48cd65e05ff64d2ec6b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:21:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ffrqq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:21:02Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-hvvbg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:46Z is after 2025-08-24T17:21:41Z" Sep 30 
12:21:46 crc kubenswrapper[5002]: I0930 12:21:46.415641 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1d4eefd-8292-45a1-af4b-4a4906cccd2f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21a70de7ac636795f52b3bdd2a2b938494cabcf80e1fb9b321a309f4719edf4e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://240fb1d0e11ae7b3459121ba32094ddfa79dc71a1180befebcaf594859a63d86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://91de7b5c3b52d03b05c151b66857b5af5f3b9d228f3f1af32a7f504225e1e739\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"lo
g-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4cb1aca4e313653c2d83c0d4fb8dfab1e6f8c8220c806cbcdcf154f2d9ab6e29\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ef59c1ec617a61effd91da12779f13866a50fe611d078330f0275e8d6ef1c27e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3a814062d9554255b768b26ad452946c64836707ac0e68c9e3f107dc587c0ccb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3a814062d9554255b768b26ad452946c64836707ac0e68c9e3f107dc587c0ccb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://74db2c969d2f9049fd8e8130a9fd50b312f3fce049ab0e814390dbff3b0cb596\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://74db2c969d2f9049fd8e8130a9fd50b312f3fce049ab0e814390dbff3b0cb596\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"reas
on\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://a367821c9b4b8799fb789ca39d487c519925da11ebb80e94a0123e02cf78964e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a367821c9b4b8799fb789ca39d487c519925da11ebb80e94a0123e02cf78964e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:26Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:46Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:46 crc kubenswrapper[5002]: I0930 12:21:46.431932 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-ls6n9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2ebe21b2-eb85-4b30-b911-78f6619d07f9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e83e79da4b7b8f89a61048c7583c9ff7ccb7ac910744dfbb572c189c5676741\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-frsmm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:52Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-ls6n9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:46Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:46 crc kubenswrapper[5002]: I0930 12:21:46.444958 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:46 crc kubenswrapper[5002]: I0930 12:21:46.445003 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:46 crc kubenswrapper[5002]: I0930 12:21:46.445011 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:46 crc kubenswrapper[5002]: I0930 12:21:46.445027 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:46 crc kubenswrapper[5002]: I0930 12:21:46.445037 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:46Z","lastTransitionTime":"2025-09-30T12:21:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:46 crc kubenswrapper[5002]: I0930 12:21:46.447917 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"90d5183f-90e1-421f-bd64-fd7f05605586\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d2e0f074c0f31a5432cea24faa4f95c276f59cdf48bca8ceecd065b4cdff5d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://902cc950dc462d86c9de62cac5d04e99aa3240952f43e8a5f07884e70ff84b56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://047c48f1e175cddc7dc981a8d9027783f4a90c21cb444050771de50fb02e7a9e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:28
Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f45ac789dc45e907d68fe54ec7bc2ea72dbcc5f46cd55c113c014203d5e709d9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:46Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:46 crc kubenswrapper[5002]: I0930 12:21:46.466275 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:46Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:46 crc kubenswrapper[5002]: I0930 12:21:46.481163 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-ttfn8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2bd9f18d-bfb3-4bd7-9a87-242029cd3200\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4cf7d940b7333c33f9092bf1a630d298ae51fee0947c17ef09d902ae5dc1103c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0809a587f4b73c4a5e632e8db2cc0d4a957eded6da9a771cf0b53e4862de54e5\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T12:21:38Z\\\",\\\"message\\\":\\\"2025-09-30T12:20:52+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_59b1707c-38a6-4f61-b1a0-65dcba4fefc7\\\\n2025-09-30T12:20:52+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_59b1707c-38a6-4f61-b1a0-65dcba4fefc7 to /host/opt/cni/bin/\\\\n2025-09-30T12:20:53Z [verbose] multus-daemon started\\\\n2025-09-30T12:20:53Z [verbose] Readiness Indicator file check\\\\n2025-09-30T12:21:38Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:21:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lvf2n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-ttfn8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:46Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:46 crc kubenswrapper[5002]: I0930 12:21:46.492608 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"341a55c6-78d3-4fa2-8f47-b56fd41fa1c1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7bc40360d8d17f580c7ded1e33762aa443998e4202892e2f2effc08b6659a4a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbwzb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6868404a7219f8dbbc4e7541e5f80402c169808daea81b5f94546472cb8e70a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbwzb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:50Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-ncbb5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:46Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:46 crc kubenswrapper[5002]: I0930 12:21:46.524994 5002 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4pvsr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7095aa7a-d067-4977-bdc5-3a45a52a6a39\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b95267743f525eb5c8712304a1dc7afe4403b2065c0a3b594d95dcba6348170\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://89dbc048c1483376e11a6754384e9ea154767a88f7dfb3463809a2554bb142b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d41b04f928f3b4423bf4aa1babfd6292b749752d31ac57771b36bed503312d3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0
-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ceb946c94ca247c05c57001754a73122b1bb98a98d886df9d64b2e4a003a4cf6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfbe7b62a4f0a00896ac4608d70c4ef6dc7675e0312f85ca181fcf1087fe73e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a65c3c1af3f1ee07c2f5350011d1f5eb61dfe0b21ff13d7326373d65ffe603c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\
\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a9a8ff35878c8516cf03a6425a099adbb9e94d1feae510064c59f89b051fcb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9a9a8ff35878c8516cf03a6425a099adbb9e94d1feae510064c59f89b051fcb5\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T12:21:44Z\\\",\\\"message\\\":\\\"Ds:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-machine-api/machine-api-operator-machine-webhook\\\\\\\"}, Opts:services.LBOpts{Reject:false, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}, built lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-machine-api/machine-api-operator-machine-webhook_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-machine-api/machine-api-operator-machine-webhook\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.4.250\\\\\\\", Port:443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI0930 12:21:44.676107 7084 obj_retry.go:420] Function iterateRetryResources for *v1.Pod ended (in 165.305µs)\\\\nF0930 12:21:44.676014 7084 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T12:21:43Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed 
container=ovnkube-controller pod=ovnkube-node-4pvsr_openshift-ovn-kubernetes(7095aa7a-d067-4977-bdc5-3a45a52a6a39)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ce6227a7ac3ced1fbd4814039859e25ff52f314aa8479275f1bfc49b04cf411\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29ad21e0858ff157cc785f60f949fb08082938850870cc2c5198447f1e1d9253\\\",\\\"image\\\":\
\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://29ad21e0858ff157cc785f60f949fb08082938850870cc2c5198447f1e1d9253\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:50Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4pvsr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:46Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:46 crc kubenswrapper[5002]: I0930 12:21:46.538590 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://30f55f9a0bcab13420179c62f63cd5e785afe964ac86286450fcd91d7ca0562e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://306dd7840789c5fe2565d819a744a57e330cb1fcc1ab1ca4b3c5ebcfd0361d3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d20
99482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:46Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:46 crc kubenswrapper[5002]: I0930 12:21:46.546845 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:46 crc kubenswrapper[5002]: I0930 12:21:46.546899 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:46 crc kubenswrapper[5002]: I0930 12:21:46.546914 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:46 crc kubenswrapper[5002]: I0930 12:21:46.546934 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:46 crc kubenswrapper[5002]: I0930 12:21:46.546949 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:46Z","lastTransitionTime":"2025-09-30T12:21:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 12:21:46 crc kubenswrapper[5002]: I0930 12:21:46.550825 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"98317299-6ce9-4196-a387-becb81461d6a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6057b9d238b4d4dd1329f0ffdbfd88f2f8869c06e5eb681041229a15dd4a08dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://96f9b5d0a287ad67a6f1008d06b0d371a522722280353ed9001f81198b9295ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd0dba52ee6bddc02187198fe6c409abf3d336df93b558b27fc7f76e7ec8f2b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"
cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://84c3302e7daef1722f84b63d532dabe8e9f8cf5679478df9b92b73273b3a3000\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://84c3302e7daef1722f84b63d532dabe8e9f8cf5679478df9b92b73273b3a3000\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:27Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:26Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:46Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:46 crc kubenswrapper[5002]: I0930 12:21:46.563021 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:46Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:46 crc kubenswrapper[5002]: I0930 12:21:46.572974 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-p45pn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d0334cb8-cf38-4e50-80e8-2b81d8d46ae7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca035599120eed38b0abc1893076e4d1ecab6b1ce53605f53ff30fa2782b63de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2nvm4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:49Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-p45pn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-09-30T12:21:46Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:46 crc kubenswrapper[5002]: I0930 12:21:46.586045 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-2lqq2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7b7df259-4156-484c-bedd-543ca42f2970\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3e10fed923eacb0c5409835ec689ec74b4ef68b770774b3fe03e05db05d63c04\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6865c45580d6dc6acaf6dede9483213728b45f602a6df3a6d9264bbec520db66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6865c45580d6dc6acaf6dede9483213728b45f602a6df3a6d9264bbec520db66\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://042d2758098735539d73ee03bcaf5bfaaa765ef2337975341c
63511bff33e4ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://042d2758098735539d73ee03bcaf5bfaaa765ef2337975341c63511bff33e4ec\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://037e6837e485e2662c487cea7b28bd28b85c3aa2dc698edd3d16b4269907dd83\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://037e6837e485e2662c487cea7b28bd28b85c3aa2dc698edd3d16b4269907dd83\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2a451f8cf7baf508ad6ceda7f19f3117804e5698e678bedf8e4187f60847aaac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2a451f8cf7baf508ad6ceda7f19f3117804e5698e678bedf8e4187f60847aaac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-
copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://edf6916be3680c65376e28fb893a869599eb07d870ac5337a38fed83b4dbf28d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://edf6916be3680c65376e28fb893a869599eb07d870ac5337a38fed83b4dbf28d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7019df241ab90e98201c474287f02919d99bca60c432a8f61ac68d29ce53bed8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7019df241ab90e98201c474287f02919d99bca60c432a8f61ac68d29ce53bed8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-2lqq2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:46Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:46 crc kubenswrapper[5002]: I0930 12:21:46.597559 5002 status_manager.go:875] "Failed to update status for 
pod" pod="openshift-multus/network-metrics-daemon-dj2ln" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"68756e8a-d882-403f-acd7-2c41fce4446f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jdgr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jdgr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:21:04Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-dj2ln\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:46Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:46 crc kubenswrapper[5002]: I0930 12:21:46.608764 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"be2f35cf-d5f8-40aa-ae79-11e075053735\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b282a6a59ec67140cc73df77f9505388fb4ff97dfdedf8394eeb46d65a2bda8a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1f6f78afdbf3b3bd7c28515e9ff9ad08cdccd4f4f44106f8aa5375638b3e8ec9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1f6f78afdbf3b3bd7c28515e9ff9ad08cdccd4f4f44106f8aa5375638b3e8ec9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:26Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:46Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:46 crc kubenswrapper[5002]: I0930 12:21:46.650041 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:46 crc kubenswrapper[5002]: I0930 12:21:46.650100 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 30 12:21:46 crc kubenswrapper[5002]: I0930 12:21:46.650116 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:46 crc kubenswrapper[5002]: I0930 12:21:46.650141 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:46 crc kubenswrapper[5002]: I0930 12:21:46.650158 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:46Z","lastTransitionTime":"2025-09-30T12:21:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:46 crc kubenswrapper[5002]: I0930 12:21:46.675068 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 12:21:46 crc kubenswrapper[5002]: I0930 12:21:46.675161 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 12:21:46 crc kubenswrapper[5002]: I0930 12:21:46.675202 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 12:21:46 crc kubenswrapper[5002]: E0930 12:21:46.675388 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 12:21:46 crc kubenswrapper[5002]: E0930 12:21:46.675599 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 12:21:46 crc kubenswrapper[5002]: E0930 12:21:46.675767 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 12:21:46 crc kubenswrapper[5002]: I0930 12:21:46.693618 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:46Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:46 crc kubenswrapper[5002]: I0930 12:21:46.709633 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-p45pn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d0334cb8-cf38-4e50-80e8-2b81d8d46ae7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca035599120eed38b0abc1893076e4d1ecab6b1ce53605f53ff30fa2782b63de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2nvm4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:49Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-p45pn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:46Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:46 crc kubenswrapper[5002]: I0930 12:21:46.738816 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-2lqq2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7b7df259-4156-484c-bedd-543ca42f2970\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3e10fed923eacb0c5409835ec689ec74b4ef68b770774b3fe03e05db05d63c04\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6865c45580d6dc6acaf6dede9483213728b45f602a6df3a6d9264bbec520db66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6865c45580d6dc6acaf6dede9483213728b45f602a6df3a6d9264bbec520db66\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://042d2758098735539d73ee03bcaf5bfaaa765ef2337975341c63511bff33e4ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://042d2758098735539d73ee03bcaf5bfaaa765ef2337975341c63511bff33e4ec\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://037e6837e485e2662c487cea7b28bd28b85c3aa2dc698edd3d16b4269907dd83\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://037e6837e485e2662c487cea7b28bd28b85c3aa2dc698edd3d16b4269907dd83\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2a451f8cf7baf508ad6ceda7f19f3117804e5698e678bedf8e4187f60847aaac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2a451f8cf7baf508ad6ceda7f19f3117804e5698e678bedf8e4187f60847aaac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://edf6916be3680c65376e28fb893a869599eb07d870ac5337a38fed83b4dbf28d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://edf6916be3680c65376e28fb893a869599eb07d870ac5337a38fed83b4dbf28d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7019df241ab90e98201c474287f02919d99bca60c432a8f61ac68d29ce53bed8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7019df241ab90e98201c474287f02919d99bca60c432a8f61ac68d29ce53bed8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-2lqq2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:46Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:46 crc kubenswrapper[5002]: I0930 12:21:46.753983 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:46 crc kubenswrapper[5002]: I0930 12:21:46.754656 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:46 crc 
kubenswrapper[5002]: I0930 12:21:46.754771 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:46 crc kubenswrapper[5002]: I0930 12:21:46.755408 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:46 crc kubenswrapper[5002]: I0930 12:21:46.755524 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:46Z","lastTransitionTime":"2025-09-30T12:21:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:46 crc kubenswrapper[5002]: I0930 12:21:46.756892 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-dj2ln" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"68756e8a-d882-403f-acd7-2c41fce4446f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jdgr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jdgr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:21:04Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-dj2ln\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:46Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:46 crc kubenswrapper[5002]: I0930 12:21:46.774015 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"be2f35cf-d5f8-40aa-ae79-11e075053735\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b282a6a59ec67140cc73df77f9505388fb4ff97dfdedf8394eeb46d65a2bda8a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1f6f78afdbf3b3bd7c28515e9ff9ad08cdccd4f4f44106f8aa5375638b3e8ec9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1f6f78afdbf3b3bd7c28515e9ff9ad08cdccd4f4f44106f8aa5375638b3e8ec9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:26Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:46Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:46 crc kubenswrapper[5002]: I0930 12:21:46.791844 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"98317299-6ce9-4196-a387-becb81461d6a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6057b9d238b4d4dd1329f0ffdbfd88f2f8869c06e5eb681041229a15dd4a08dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://96f9b5d0a287ad67a6f1008d06b0d371a522722280353ed9001f81198b9295ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd0dba52ee6bddc02187198fe6c409abf3d336df93b558b27fc7f76e7ec8f2b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://84c3302e7daef1722f84b63d532dabe8e9f8cf5679478df9b92b73273b3a3000\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://84c3302e7daef1722f84b63d532dabe8e9f8cf5679478df9b92b73273b3a3000\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:27Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:26Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:46Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:46 crc kubenswrapper[5002]: I0930 12:21:46.812404 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://28d9568103a5257388af7a54e676246ab6ee732abd4516bd06e2cd7471f0ec08\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:46Z is after 
2025-08-24T17:21:41Z" Sep 30 12:21:46 crc kubenswrapper[5002]: I0930 12:21:46.830212 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97f2c4d225c4e4f2f04337915618c52ced7280fe45dc735fd9b1aca03f95f7f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:46Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:46 crc kubenswrapper[5002]: I0930 12:21:46.851565 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:46Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:46 crc kubenswrapper[5002]: I0930 12:21:46.858395 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:46 crc kubenswrapper[5002]: I0930 12:21:46.858749 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:46 crc kubenswrapper[5002]: I0930 12:21:46.858968 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:46 crc kubenswrapper[5002]: I0930 12:21:46.859161 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:46 crc kubenswrapper[5002]: I0930 12:21:46.859371 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:46Z","lastTransitionTime":"2025-09-30T12:21:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 12:21:46 crc kubenswrapper[5002]: I0930 12:21:46.869576 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hvvbg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c3b6a7b0-1c44-43ee-a789-36bb0cd51b87\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cb569d449d2d8d49ee8d62c4815b625481cd5ddd882a135dac7465fe63fbc67e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:21:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ffrqq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4537e7bf982dba689bd81821609884ac5f2530e6efa7d48cd65e05ff64d2ec6b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:21:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ffrqq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:21:02Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-hvvbg\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:46Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:46 crc kubenswrapper[5002]: I0930 12:21:46.902908 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1d4eefd-8292-45a1-af4b-4a4906cccd2f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21a70de7ac636795f52b3bdd2a2b938494cabcf80e1fb9b321a309f4719edf4e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://240fb1d0e11ae7b3459121ba32094ddfa79dc71a1180befebcaf594859a63d86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://91de7b5c3b52d03b05c151b66857b5af5f3b9d228f3f1af32a7f504225e1e739\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"la
stState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4cb1aca4e313653c2d83c0d4fb8dfab1e6f8c8220c806cbcdcf154f2d9ab6e29\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ef59c1ec617a61effd91da12779f13866a50fe611d078330f0275e8d6ef1c27e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3a814062d9554255b768b26ad452946c64836707ac0e68c9e3f107dc587c0ccb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3a814062d9554255b768b26ad452946c64836707ac0e68c9e3f107dc587c0ccb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://74db2c969d2f9049fd8e8130a9fd50b312f3fce049ab0e814390dbff3b0cb596\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",
\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://74db2c969d2f9049fd8e8130a9fd50b312f3fce049ab0e814390dbff3b0cb596\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://a367821c9b4b8799fb789ca39d487c519925da11ebb80e94a0123e02cf78964e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a367821c9b4b8799fb789ca39d487c519925da11ebb80e94a0123e02cf78964e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:26Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:46Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:46 crc kubenswrapper[5002]: I0930 12:21:46.924608 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1fe2fccc-790d-41b7-a322-0f99dbd9b3e2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a46e01897629261ad42fc3fa2cc7fdf56a2a020864a1088d9e58edf61aac296e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://017ebbf00a59d33f36e3515dd280bc0c4363da8b3e621339185ebdb3c8728148\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ee0dc371a43f3780e1178e29ac491c7a61786fccb37527dd2f21d800adbbdae\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ba33674850f0b121ea405e37924fcc085b3a3625e87432d9df38b2420920bf45\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://49c139b2c0ae558d108d47a8452c2a8348fc164761d468e1b9b98ff2c7407f20\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"message\\\":\\\"le observer\\\\nW0930 12:20:46.369838 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0930 12:20:46.370037 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 12:20:46.371113 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1995022772/tls.crt::/tmp/serving-cert-1995022772/tls.key\\\\\\\"\\\\nI0930 12:20:46.549991 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0930 12:20:46.558658 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0930 12:20:46.558692 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0930 12:20:46.558723 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0930 12:20:46.558735 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0930 12:20:46.572632 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0930 12:20:46.572689 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 12:20:46.572707 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 12:20:46.572718 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0930 12:20:46.572725 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0930 12:20:46.572737 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0930 12:20:46.572744 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0930 12:20:46.573045 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0930 12:20:46.574462 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:40Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:21:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b99e73bb34b117cdb12e2475fc8e7d0ffdcb7fc44a000282e7776473eca209ea\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://24b1bc36a80f3925846e76f49c3347f1e2f14555096b3a514b06c6eb2e8fe02c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://24b1bc36a80f3925846e76f49c3347f1e2f14555096b3a514b06c6eb2e8fe02c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:46Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:46 crc kubenswrapper[5002]: I0930 12:21:46.939983 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"90d5183f-90e1-421f-bd64-fd7f05605586\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d2e0f074c0f31a5432cea24faa4f95c276f59cdf48bca8ceecd065b4cdff5d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://902cc950dc462d86c9de62cac5d04e99aa3240952f43e8a5f07884e70ff84b56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://047c48f1e175cddc7dc981a8d9027783f4a90c21cb444050771de50fb02e7a9e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f45ac789dc45e907d68fe54ec7bc2ea72dbcc5f46cd55c113c014203d5e709d9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:46Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:46 crc kubenswrapper[5002]: I0930 12:21:46.954803 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-ls6n9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2ebe21b2-eb85-4b30-b911-78f6619d07f9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e83e79da4b7b8f89a61048c7583c9ff7ccb7ac910744dfbb572c189c5676741\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-frsmm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase
\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:52Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-ls6n9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:46Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:46 crc kubenswrapper[5002]: I0930 12:21:46.962105 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:46 crc kubenswrapper[5002]: I0930 12:21:46.962162 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:46 crc kubenswrapper[5002]: I0930 12:21:46.962181 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:46 crc kubenswrapper[5002]: I0930 12:21:46.962207 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:46 crc kubenswrapper[5002]: I0930 12:21:46.962226 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:46Z","lastTransitionTime":"2025-09-30T12:21:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:46 crc kubenswrapper[5002]: I0930 12:21:46.972285 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-ttfn8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2bd9f18d-bfb3-4bd7-9a87-242029cd3200\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4cf7d940b7333c33f9092bf1a630d298ae51fee0947c17ef09d902ae5dc1103c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0809a587f4b73c4a5e632e8db2cc0d4a957eded6da9a771cf0b53e4862de54e5\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T12:21:38Z\\\",\\\"message\\\":\\\"2025-09-30T12:20:52+00:00 [cnibincopy] Successfully copied files in 
/usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_59b1707c-38a6-4f61-b1a0-65dcba4fefc7\\\\n2025-09-30T12:20:52+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_59b1707c-38a6-4f61-b1a0-65dcba4fefc7 to /host/opt/cni/bin/\\\\n2025-09-30T12:20:53Z [verbose] multus-daemon started\\\\n2025-09-30T12:20:53Z [verbose] Readiness Indicator file check\\\\n2025-09-30T12:21:38Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:21:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lvf2n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-ttfn8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:46Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:46 crc kubenswrapper[5002]: I0930 12:21:46.990331 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"341a55c6-78d3-4fa2-8f47-b56fd41fa1c1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7bc40360d8d17f580c7ded1e33762aa443998e4202892e2f2effc08b6659a4a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbwzb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6868404a7219f8dbbc4e7541e5f80402c169808daea81b5f94546472cb8e70a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbwzb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:50Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-ncbb5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:46Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:47 crc kubenswrapper[5002]: I0930 12:21:47.020601 5002 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4pvsr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7095aa7a-d067-4977-bdc5-3a45a52a6a39\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b95267743f525eb5c8712304a1dc7afe4403b2065c0a3b594d95dcba6348170\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://89dbc048c1483376e11a6754384e9ea154767a88f7dfb3463809a2554bb142b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d41b04f928f3b4423bf4aa1babfd6292b749752d31ac57771b36bed503312d3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0
-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ceb946c94ca247c05c57001754a73122b1bb98a98d886df9d64b2e4a003a4cf6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfbe7b62a4f0a00896ac4608d70c4ef6dc7675e0312f85ca181fcf1087fe73e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a65c3c1af3f1ee07c2f5350011d1f5eb61dfe0b21ff13d7326373d65ffe603c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\
\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a9a8ff35878c8516cf03a6425a099adbb9e94d1feae510064c59f89b051fcb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9a9a8ff35878c8516cf03a6425a099adbb9e94d1feae510064c59f89b051fcb5\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T12:21:44Z\\\",\\\"message\\\":\\\"Ds:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-machine-api/machine-api-operator-machine-webhook\\\\\\\"}, Opts:services.LBOpts{Reject:false, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}, built lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-machine-api/machine-api-operator-machine-webhook_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-machine-api/machine-api-operator-machine-webhook\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.4.250\\\\\\\", Port:443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI0930 12:21:44.676107 7084 obj_retry.go:420] Function iterateRetryResources for *v1.Pod ended (in 165.305µs)\\\\nF0930 12:21:44.676014 7084 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T12:21:43Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed 
container=ovnkube-controller pod=ovnkube-node-4pvsr_openshift-ovn-kubernetes(7095aa7a-d067-4977-bdc5-3a45a52a6a39)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ce6227a7ac3ced1fbd4814039859e25ff52f314aa8479275f1bfc49b04cf411\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29ad21e0858ff157cc785f60f949fb08082938850870cc2c5198447f1e1d9253\\\",\\\"image\\\":\
\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://29ad21e0858ff157cc785f60f949fb08082938850870cc2c5198447f1e1d9253\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:50Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4pvsr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:47Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:47 crc kubenswrapper[5002]: I0930 12:21:47.041296 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://30f55f9a0bcab13420179c62f63cd5e785afe964ac86286450fcd91d7ca0562e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://306dd7840789c5fe2565d819a744a57e330cb1fcc1ab1ca4b3c5ebcfd0361d3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d20
Sep 30 12:21:47 crc kubenswrapper[5002]: I0930 12:21:47.041296 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://30f55f9a0bcab13420179c62f63cd5e785afe964ac86286450fcd91d7ca0562e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://306dd7840789c5fe2565d819a744a57e330cb1fcc1ab1ca4b3c5ebcfd0361d3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:47Z is after 2025-08-24T17:21:41Z"
Sep 30 12:21:47 crc kubenswrapper[5002]: I0930 12:21:47.057143 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:47Z is after 2025-08-24T17:21:41Z"
Sep 30 12:21:47 crc kubenswrapper[5002]: I0930 12:21:47.064979 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 12:21:47 crc kubenswrapper[5002]: I0930 12:21:47.065061 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 12:21:47 crc kubenswrapper[5002]: I0930 12:21:47.065079 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 12:21:47 crc kubenswrapper[5002]: I0930 12:21:47.065101 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 12:21:47 crc kubenswrapper[5002]: I0930 12:21:47.065121 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:47Z","lastTransitionTime":"2025-09-30T12:21:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 12:21:47 crc kubenswrapper[5002]: I0930 12:21:47.169362 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 12:21:47 crc kubenswrapper[5002]: I0930 12:21:47.169455 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 12:21:47 crc kubenswrapper[5002]: I0930 12:21:47.169467 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 12:21:47 crc kubenswrapper[5002]: I0930 12:21:47.169522 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Has your network provider started?"} Sep 30 12:21:47 crc kubenswrapper[5002]: I0930 12:21:47.272258 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:47 crc kubenswrapper[5002]: I0930 12:21:47.272338 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:47 crc kubenswrapper[5002]: I0930 12:21:47.272363 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:47 crc kubenswrapper[5002]: I0930 12:21:47.272395 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:47 crc kubenswrapper[5002]: I0930 12:21:47.272418 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:47Z","lastTransitionTime":"2025-09-30T12:21:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:47 crc kubenswrapper[5002]: I0930 12:21:47.375290 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:47 crc kubenswrapper[5002]: I0930 12:21:47.375770 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:47 crc kubenswrapper[5002]: I0930 12:21:47.375961 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:47 crc kubenswrapper[5002]: I0930 12:21:47.376132 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:47 crc kubenswrapper[5002]: I0930 12:21:47.376309 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:47Z","lastTransitionTime":"2025-09-30T12:21:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:47 crc kubenswrapper[5002]: I0930 12:21:47.479632 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:47 crc kubenswrapper[5002]: I0930 12:21:47.479688 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:47 crc kubenswrapper[5002]: I0930 12:21:47.479700 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:47 crc kubenswrapper[5002]: I0930 12:21:47.479717 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:47 crc kubenswrapper[5002]: I0930 12:21:47.479728 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:47Z","lastTransitionTime":"2025-09-30T12:21:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 12:21:47 crc kubenswrapper[5002]: I0930 12:21:47.583307 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:47 crc kubenswrapper[5002]: I0930 12:21:47.583374 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:47 crc kubenswrapper[5002]: I0930 12:21:47.583396 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:47 crc kubenswrapper[5002]: I0930 12:21:47.583422 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:47 crc kubenswrapper[5002]: I0930 12:21:47.583439 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:47Z","lastTransitionTime":"2025-09-30T12:21:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:47 crc kubenswrapper[5002]: I0930 12:21:47.675882 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-dj2ln" Sep 30 12:21:47 crc kubenswrapper[5002]: E0930 12:21:47.676078 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-dj2ln" podUID="68756e8a-d882-403f-acd7-2c41fce4446f" Sep 30 12:21:47 crc kubenswrapper[5002]: I0930 12:21:47.686908 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:47 crc kubenswrapper[5002]: I0930 12:21:47.687092 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:47 crc kubenswrapper[5002]: I0930 12:21:47.687239 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:47 crc kubenswrapper[5002]: I0930 12:21:47.687371 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:47 crc kubenswrapper[5002]: I0930 12:21:47.687554 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:47Z","lastTransitionTime":"2025-09-30T12:21:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 12:21:47 crc kubenswrapper[5002]: I0930 12:21:47.790358 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:47 crc kubenswrapper[5002]: I0930 12:21:47.790711 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:47 crc kubenswrapper[5002]: I0930 12:21:47.790830 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:47 crc kubenswrapper[5002]: I0930 12:21:47.790964 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:47 crc kubenswrapper[5002]: I0930 12:21:47.791067 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:47Z","lastTransitionTime":"2025-09-30T12:21:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:47 crc kubenswrapper[5002]: I0930 12:21:47.894782 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:47 crc kubenswrapper[5002]: I0930 12:21:47.894857 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:47 crc kubenswrapper[5002]: I0930 12:21:47.894876 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:47 crc kubenswrapper[5002]: I0930 12:21:47.894902 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:47 crc kubenswrapper[5002]: I0930 12:21:47.894921 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:47Z","lastTransitionTime":"2025-09-30T12:21:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:47 crc kubenswrapper[5002]: I0930 12:21:47.999096 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:47 crc kubenswrapper[5002]: I0930 12:21:47.999176 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:47 crc kubenswrapper[5002]: I0930 12:21:47.999207 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:47 crc kubenswrapper[5002]: I0930 12:21:47.999235 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:47 crc kubenswrapper[5002]: I0930 12:21:47.999257 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:47Z","lastTransitionTime":"2025-09-30T12:21:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 12:21:48 crc kubenswrapper[5002]: I0930 12:21:48.102040 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:48 crc kubenswrapper[5002]: I0930 12:21:48.102091 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:48 crc kubenswrapper[5002]: I0930 12:21:48.102110 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:48 crc kubenswrapper[5002]: I0930 12:21:48.102134 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:48 crc kubenswrapper[5002]: I0930 12:21:48.102151 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:48Z","lastTransitionTime":"2025-09-30T12:21:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:48 crc kubenswrapper[5002]: I0930 12:21:48.205910 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:48 crc kubenswrapper[5002]: I0930 12:21:48.205993 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:48 crc kubenswrapper[5002]: I0930 12:21:48.206017 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:48 crc kubenswrapper[5002]: I0930 12:21:48.206049 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:48 crc kubenswrapper[5002]: I0930 12:21:48.206073 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:48Z","lastTransitionTime":"2025-09-30T12:21:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:48 crc kubenswrapper[5002]: I0930 12:21:48.316300 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:48 crc kubenswrapper[5002]: I0930 12:21:48.316659 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:48 crc kubenswrapper[5002]: I0930 12:21:48.316845 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:48 crc kubenswrapper[5002]: I0930 12:21:48.316877 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:48 crc kubenswrapper[5002]: I0930 12:21:48.316971 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:48Z","lastTransitionTime":"2025-09-30T12:21:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 12:21:48 crc kubenswrapper[5002]: I0930 12:21:48.421132 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:48 crc kubenswrapper[5002]: I0930 12:21:48.421184 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:48 crc kubenswrapper[5002]: I0930 12:21:48.421200 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:48 crc kubenswrapper[5002]: I0930 12:21:48.421252 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:48 crc kubenswrapper[5002]: I0930 12:21:48.421270 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:48Z","lastTransitionTime":"2025-09-30T12:21:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:48 crc kubenswrapper[5002]: I0930 12:21:48.524817 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:48 crc kubenswrapper[5002]: I0930 12:21:48.524872 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:48 crc kubenswrapper[5002]: I0930 12:21:48.524891 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:48 crc kubenswrapper[5002]: I0930 12:21:48.524911 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:48 crc kubenswrapper[5002]: I0930 12:21:48.524924 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:48Z","lastTransitionTime":"2025-09-30T12:21:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:48 crc kubenswrapper[5002]: I0930 12:21:48.627961 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:48 crc kubenswrapper[5002]: I0930 12:21:48.628009 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:48 crc kubenswrapper[5002]: I0930 12:21:48.628019 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:48 crc kubenswrapper[5002]: I0930 12:21:48.628035 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:48 crc kubenswrapper[5002]: I0930 12:21:48.628048 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:48Z","lastTransitionTime":"2025-09-30T12:21:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 12:21:48 crc kubenswrapper[5002]: I0930 12:21:48.675436 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 12:21:48 crc kubenswrapper[5002]: E0930 12:21:48.675609 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 12:21:48 crc kubenswrapper[5002]: I0930 12:21:48.675437 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 12:21:48 crc kubenswrapper[5002]: I0930 12:21:48.675708 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 12:21:48 crc kubenswrapper[5002]: E0930 12:21:48.675947 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 12:21:48 crc kubenswrapper[5002]: E0930 12:21:48.675993 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 12:21:48 crc kubenswrapper[5002]: I0930 12:21:48.730839 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:48 crc kubenswrapper[5002]: I0930 12:21:48.730917 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:48 crc kubenswrapper[5002]: I0930 12:21:48.730940 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:48 crc kubenswrapper[5002]: I0930 12:21:48.730972 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:48 crc kubenswrapper[5002]: I0930 12:21:48.730997 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:48Z","lastTransitionTime":"2025-09-30T12:21:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 12:21:48 crc kubenswrapper[5002]: I0930 12:21:48.833914 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:48 crc kubenswrapper[5002]: I0930 12:21:48.833977 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:48 crc kubenswrapper[5002]: I0930 12:21:48.833995 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:48 crc kubenswrapper[5002]: I0930 12:21:48.834019 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:48 crc kubenswrapper[5002]: I0930 12:21:48.834036 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:48Z","lastTransitionTime":"2025-09-30T12:21:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:48 crc kubenswrapper[5002]: I0930 12:21:48.937134 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:48 crc kubenswrapper[5002]: I0930 12:21:48.937172 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:48 crc kubenswrapper[5002]: I0930 12:21:48.937184 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:48 crc kubenswrapper[5002]: I0930 12:21:48.937202 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:48 crc kubenswrapper[5002]: I0930 12:21:48.937214 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:48Z","lastTransitionTime":"2025-09-30T12:21:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:49 crc kubenswrapper[5002]: I0930 12:21:49.040351 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:49 crc kubenswrapper[5002]: I0930 12:21:49.040387 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:49 crc kubenswrapper[5002]: I0930 12:21:49.040395 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:49 crc kubenswrapper[5002]: I0930 12:21:49.040410 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:49 crc kubenswrapper[5002]: I0930 12:21:49.040418 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:49Z","lastTransitionTime":"2025-09-30T12:21:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 12:21:49 crc kubenswrapper[5002]: I0930 12:21:49.143958 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:49 crc kubenswrapper[5002]: I0930 12:21:49.144023 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:49 crc kubenswrapper[5002]: I0930 12:21:49.144040 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:49 crc kubenswrapper[5002]: I0930 12:21:49.144062 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:49 crc kubenswrapper[5002]: I0930 12:21:49.144079 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:49Z","lastTransitionTime":"2025-09-30T12:21:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:49 crc kubenswrapper[5002]: I0930 12:21:49.247413 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:49 crc kubenswrapper[5002]: I0930 12:21:49.247532 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:49 crc kubenswrapper[5002]: I0930 12:21:49.247548 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:49 crc kubenswrapper[5002]: I0930 12:21:49.247567 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:49 crc kubenswrapper[5002]: I0930 12:21:49.247580 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:49Z","lastTransitionTime":"2025-09-30T12:21:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:49 crc kubenswrapper[5002]: I0930 12:21:49.350417 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:49 crc kubenswrapper[5002]: I0930 12:21:49.350537 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:49 crc kubenswrapper[5002]: I0930 12:21:49.350589 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:49 crc kubenswrapper[5002]: I0930 12:21:49.350615 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:49 crc kubenswrapper[5002]: I0930 12:21:49.350634 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:49Z","lastTransitionTime":"2025-09-30T12:21:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 12:21:49 crc kubenswrapper[5002]: I0930 12:21:49.453208 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:49 crc kubenswrapper[5002]: I0930 12:21:49.453252 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:49 crc kubenswrapper[5002]: I0930 12:21:49.453261 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:49 crc kubenswrapper[5002]: I0930 12:21:49.453275 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:49 crc kubenswrapper[5002]: I0930 12:21:49.453286 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:49Z","lastTransitionTime":"2025-09-30T12:21:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:49 crc kubenswrapper[5002]: I0930 12:21:49.556359 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:49 crc kubenswrapper[5002]: I0930 12:21:49.556428 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:49 crc kubenswrapper[5002]: I0930 12:21:49.556445 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:49 crc kubenswrapper[5002]: I0930 12:21:49.556468 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:49 crc kubenswrapper[5002]: I0930 12:21:49.556525 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:49Z","lastTransitionTime":"2025-09-30T12:21:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:49 crc kubenswrapper[5002]: I0930 12:21:49.659287 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:49 crc kubenswrapper[5002]: I0930 12:21:49.659355 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:49 crc kubenswrapper[5002]: I0930 12:21:49.659379 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:49 crc kubenswrapper[5002]: I0930 12:21:49.659415 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:49 crc kubenswrapper[5002]: I0930 12:21:49.659436 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:49Z","lastTransitionTime":"2025-09-30T12:21:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 12:21:49 crc kubenswrapper[5002]: I0930 12:21:49.675722 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-dj2ln" Sep 30 12:21:49 crc kubenswrapper[5002]: E0930 12:21:49.675933 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-dj2ln" podUID="68756e8a-d882-403f-acd7-2c41fce4446f" Sep 30 12:21:49 crc kubenswrapper[5002]: I0930 12:21:49.762280 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:49 crc kubenswrapper[5002]: I0930 12:21:49.762367 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:49 crc kubenswrapper[5002]: I0930 12:21:49.762379 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:49 crc kubenswrapper[5002]: I0930 12:21:49.762397 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:49 crc kubenswrapper[5002]: I0930 12:21:49.762409 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:49Z","lastTransitionTime":"2025-09-30T12:21:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:49 crc kubenswrapper[5002]: I0930 12:21:49.864755 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:49 crc kubenswrapper[5002]: I0930 12:21:49.864816 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:49 crc kubenswrapper[5002]: I0930 12:21:49.864838 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:49 crc kubenswrapper[5002]: I0930 12:21:49.864866 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:49 crc kubenswrapper[5002]: I0930 12:21:49.864887 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:49Z","lastTransitionTime":"2025-09-30T12:21:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Sep 30 12:21:50 crc kubenswrapper[5002]: I0930 12:21:50.070754 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 12:21:50 crc kubenswrapper[5002]: I0930 12:21:50.070792 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 12:21:50 crc kubenswrapper[5002]: I0930 12:21:50.070800 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 12:21:50 crc kubenswrapper[5002]: I0930 12:21:50.070813 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 12:21:50 crc kubenswrapper[5002]: I0930 12:21:50.070823 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:50Z","lastTransitionTime":"2025-09-30T12:21:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 12:21:50 crc kubenswrapper[5002]: I0930 12:21:50.676024 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 30 12:21:50 crc kubenswrapper[5002]: I0930 12:21:50.676117 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 30 12:21:50 crc kubenswrapper[5002]: I0930 12:21:50.676085 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Sep 30 12:21:50 crc kubenswrapper[5002]: E0930 12:21:50.676362 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Sep 30 12:21:50 crc kubenswrapper[5002]: E0930 12:21:50.676541 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Sep 30 12:21:50 crc kubenswrapper[5002]: E0930 12:21:50.676693 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 12:21:50 crc kubenswrapper[5002]: I0930 12:21:50.693759 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:50 crc kubenswrapper[5002]: I0930 12:21:50.693944 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:50 crc kubenswrapper[5002]: I0930 12:21:50.694013 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:50 crc kubenswrapper[5002]: I0930 12:21:50.694077 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:50 crc kubenswrapper[5002]: I0930 12:21:50.694142 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:50Z","lastTransitionTime":"2025-09-30T12:21:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:50 crc kubenswrapper[5002]: I0930 12:21:50.791466 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 12:21:50 crc kubenswrapper[5002]: I0930 12:21:50.791728 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 12:21:50 crc kubenswrapper[5002]: I0930 12:21:50.791768 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 12:21:50 crc kubenswrapper[5002]: I0930 12:21:50.791820 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 12:21:50 crc kubenswrapper[5002]: I0930 12:21:50.791882 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 12:21:50 crc kubenswrapper[5002]: E0930 12:21:50.792000 5002 configmap.go:193] Couldn't get 
Sep 30 12:21:50 crc kubenswrapper[5002]: E0930 12:21:50.792053 5002 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered
Sep 30 12:21:50 crc kubenswrapper[5002]: E0930 12:21:50.792083 5002 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Sep 30 12:21:50 crc kubenswrapper[5002]: E0930 12:21:50.792103 5002 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Sep 30 12:21:50 crc kubenswrapper[5002]: E0930 12:21:50.792138 5002 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered
Sep 30 12:21:50 crc kubenswrapper[5002]: E0930 12:21:50.792162 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-30 12:22:54.792117593 +0000 UTC m=+149.041799779 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered
Sep 30 12:21:50 crc kubenswrapper[5002]: E0930 12:21:50.792240 5002 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered
Sep 30 12:21:50 crc kubenswrapper[5002]: E0930 12:21:50.792281 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-09-30 12:22:54.792254167 +0000 UTC m=+149.041936353 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Sep 30 12:21:50 crc kubenswrapper[5002]: E0930 12:21:50.792302 5002 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Sep 30 12:21:50 crc kubenswrapper[5002]: E0930 12:21:50.792317 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-30 12:22:54.792305688 +0000 UTC m=+149.041987864 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered
Sep 30 12:21:50 crc kubenswrapper[5002]: E0930 12:21:50.792329 5002 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Sep 30 12:21:50 crc kubenswrapper[5002]: E0930 12:21:50.792440 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-09-30 12:22:54.792410341 +0000 UTC m=+149.042092527 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Sep 30 12:21:50 crc kubenswrapper[5002]: E0930 12:21:50.792680 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 12:22:54.792655227 +0000 UTC m=+149.042337383 (durationBeforeRetry 1m4s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 12:21:51 crc kubenswrapper[5002]: I0930 12:21:51.002290 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 12:21:51 crc kubenswrapper[5002]: I0930 12:21:51.002341 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 12:21:51 crc kubenswrapper[5002]: I0930 12:21:51.002355 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 12:21:51 crc kubenswrapper[5002]: I0930 12:21:51.002377 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 12:21:51 crc kubenswrapper[5002]: I0930 12:21:51.002391 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:51Z","lastTransitionTime":"2025-09-30T12:21:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 12:21:51 crc kubenswrapper[5002]: I0930 12:21:51.675649 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-dj2ln"
Sep 30 12:21:51 crc kubenswrapper[5002]: E0930 12:21:51.675857 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-dj2ln" podUID="68756e8a-d882-403f-acd7-2c41fce4446f"
Sep 30 12:21:52 crc kubenswrapper[5002]: I0930 12:21:52.034631 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 12:21:52 crc kubenswrapper[5002]: I0930 12:21:52.034697 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 12:21:52 crc kubenswrapper[5002]: I0930 12:21:52.034708 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 12:21:52 crc kubenswrapper[5002]: I0930 12:21:52.034734 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 12:21:52 crc kubenswrapper[5002]: I0930 12:21:52.034748 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:52Z","lastTransitionTime":"2025-09-30T12:21:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 12:21:52 crc kubenswrapper[5002]: I0930 12:21:52.675723 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 30 12:21:52 crc kubenswrapper[5002]: I0930 12:21:52.675812 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Sep 30 12:21:52 crc kubenswrapper[5002]: E0930 12:21:52.675859 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Sep 30 12:21:52 crc kubenswrapper[5002]: I0930 12:21:52.675876 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 30 12:21:52 crc kubenswrapper[5002]: E0930 12:21:52.675962 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Sep 30 12:21:52 crc kubenswrapper[5002]: E0930 12:21:52.676062 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Sep 30 12:21:53 crc kubenswrapper[5002]: I0930 12:21:53.064648 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 12:21:53 crc kubenswrapper[5002]: I0930 12:21:53.064712 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 12:21:53 crc kubenswrapper[5002]: I0930 12:21:53.064725 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 12:21:53 crc kubenswrapper[5002]: I0930 12:21:53.064743 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 12:21:53 crc kubenswrapper[5002]: I0930 12:21:53.064791 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:53Z","lastTransitionTime":"2025-09-30T12:21:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 12:21:53 crc kubenswrapper[5002]: I0930 12:21:53.675094 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-dj2ln"
Sep 30 12:21:53 crc kubenswrapper[5002]: E0930 12:21:53.676353 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-dj2ln" podUID="68756e8a-d882-403f-acd7-2c41fce4446f"
pod="openshift-multus/network-metrics-daemon-dj2ln" podUID="68756e8a-d882-403f-acd7-2c41fce4446f" Sep 30 12:21:53 crc kubenswrapper[5002]: I0930 12:21:53.683370 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:53 crc kubenswrapper[5002]: I0930 12:21:53.683439 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:53 crc kubenswrapper[5002]: I0930 12:21:53.683462 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:53 crc kubenswrapper[5002]: I0930 12:21:53.683534 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:53 crc kubenswrapper[5002]: I0930 12:21:53.683559 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:53Z","lastTransitionTime":"2025-09-30T12:21:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:53 crc kubenswrapper[5002]: I0930 12:21:53.786415 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:53 crc kubenswrapper[5002]: I0930 12:21:53.786750 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:53 crc kubenswrapper[5002]: I0930 12:21:53.786794 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:53 crc kubenswrapper[5002]: I0930 12:21:53.786820 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:53 crc kubenswrapper[5002]: I0930 12:21:53.786838 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:53Z","lastTransitionTime":"2025-09-30T12:21:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 12:21:53 crc kubenswrapper[5002]: I0930 12:21:53.890452 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:53 crc kubenswrapper[5002]: I0930 12:21:53.890542 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:53 crc kubenswrapper[5002]: I0930 12:21:53.890561 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:53 crc kubenswrapper[5002]: I0930 12:21:53.890584 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:53 crc kubenswrapper[5002]: I0930 12:21:53.890601 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:53Z","lastTransitionTime":"2025-09-30T12:21:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:53 crc kubenswrapper[5002]: I0930 12:21:53.993861 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:53 crc kubenswrapper[5002]: I0930 12:21:53.993962 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:53 crc kubenswrapper[5002]: I0930 12:21:53.993981 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:53 crc kubenswrapper[5002]: I0930 12:21:53.994009 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:53 crc kubenswrapper[5002]: I0930 12:21:53.994031 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:53Z","lastTransitionTime":"2025-09-30T12:21:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:54 crc kubenswrapper[5002]: I0930 12:21:54.096299 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:54 crc kubenswrapper[5002]: I0930 12:21:54.096341 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:54 crc kubenswrapper[5002]: I0930 12:21:54.096350 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:54 crc kubenswrapper[5002]: I0930 12:21:54.096364 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:54 crc kubenswrapper[5002]: I0930 12:21:54.096374 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:54Z","lastTransitionTime":"2025-09-30T12:21:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 12:21:54 crc kubenswrapper[5002]: I0930 12:21:54.199673 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:54 crc kubenswrapper[5002]: I0930 12:21:54.199781 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:54 crc kubenswrapper[5002]: I0930 12:21:54.199805 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:54 crc kubenswrapper[5002]: I0930 12:21:54.199829 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:54 crc kubenswrapper[5002]: I0930 12:21:54.199852 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:54Z","lastTransitionTime":"2025-09-30T12:21:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:54 crc kubenswrapper[5002]: I0930 12:21:54.302706 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:54 crc kubenswrapper[5002]: I0930 12:21:54.302742 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:54 crc kubenswrapper[5002]: I0930 12:21:54.302754 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:54 crc kubenswrapper[5002]: I0930 12:21:54.302770 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:54 crc kubenswrapper[5002]: I0930 12:21:54.302784 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:54Z","lastTransitionTime":"2025-09-30T12:21:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:54 crc kubenswrapper[5002]: I0930 12:21:54.405591 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:54 crc kubenswrapper[5002]: I0930 12:21:54.405670 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:54 crc kubenswrapper[5002]: I0930 12:21:54.405695 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:54 crc kubenswrapper[5002]: I0930 12:21:54.405719 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:54 crc kubenswrapper[5002]: I0930 12:21:54.405735 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:54Z","lastTransitionTime":"2025-09-30T12:21:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 12:21:54 crc kubenswrapper[5002]: I0930 12:21:54.508794 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:54 crc kubenswrapper[5002]: I0930 12:21:54.508849 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:54 crc kubenswrapper[5002]: I0930 12:21:54.508866 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:54 crc kubenswrapper[5002]: I0930 12:21:54.508891 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:54 crc kubenswrapper[5002]: I0930 12:21:54.508908 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:54Z","lastTransitionTime":"2025-09-30T12:21:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:54 crc kubenswrapper[5002]: I0930 12:21:54.611946 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:54 crc kubenswrapper[5002]: I0930 12:21:54.611977 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:54 crc kubenswrapper[5002]: I0930 12:21:54.611986 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:54 crc kubenswrapper[5002]: I0930 12:21:54.612003 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:54 crc kubenswrapper[5002]: I0930 12:21:54.612012 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:54Z","lastTransitionTime":"2025-09-30T12:21:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:54 crc kubenswrapper[5002]: I0930 12:21:54.676092 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 12:21:54 crc kubenswrapper[5002]: E0930 12:21:54.676226 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 12:21:54 crc kubenswrapper[5002]: I0930 12:21:54.676293 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 12:21:54 crc kubenswrapper[5002]: E0930 12:21:54.676356 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 12:21:54 crc kubenswrapper[5002]: I0930 12:21:54.676108 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 12:21:54 crc kubenswrapper[5002]: E0930 12:21:54.676668 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 12:21:54 crc kubenswrapper[5002]: I0930 12:21:54.715036 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:54 crc kubenswrapper[5002]: I0930 12:21:54.715095 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:54 crc kubenswrapper[5002]: I0930 12:21:54.715112 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:54 crc kubenswrapper[5002]: I0930 12:21:54.715136 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:54 crc kubenswrapper[5002]: I0930 12:21:54.715153 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:54Z","lastTransitionTime":"2025-09-30T12:21:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
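Every "Error syncing pod, skipping" record above fails for the same underlying reason: the kubelet finds no CNI network configuration in /etc/kubernetes/cni/net.d/, so no pod sandbox can be given a network and the node keeps reporting NotReady. The following is a minimal diagnostic sketch in Go, not kubelet source; the set of file extensions scanned is an assumption based on common CNI conventions, not the kubelet's exact list.

// cnicheck.go: report whether any CNI network config exists in the
// directory named by the log messages above (diagnostic sketch only).
package main

import (
	"fmt"
	"os"
	"path/filepath"
)

func main() {
	confDir := "/etc/kubernetes/cni/net.d" // directory taken from the log message
	var found []string
	// Conventional CNI config extensions; an assumption for illustration.
	for _, pat := range []string{"*.conf", "*.conflist", "*.json"} {
		matches, err := filepath.Glob(filepath.Join(confDir, pat))
		if err != nil {
			fmt.Fprintln(os.Stderr, "bad glob pattern:", err)
			os.Exit(1)
		}
		found = append(found, matches...)
	}
	if len(found) == 0 {
		fmt.Println("no CNI configuration file found; network plugin would report not ready")
		return
	}
	fmt.Println("CNI configuration present:", found)
}

Run on the node, an empty directory reproduces the "no CNI configuration file" condition logged above; these sync errors should clear once the network provider writes its configuration there.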
Sep 30 12:21:54 crc kubenswrapper[5002]: I0930 12:21:54.780772 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 12:21:54 crc kubenswrapper[5002]: I0930 12:21:54.780839 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 12:21:54 crc kubenswrapper[5002]: I0930 12:21:54.780851 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 12:21:54 crc kubenswrapper[5002]: I0930 12:21:54.780866 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 12:21:54 crc kubenswrapper[5002]: I0930 12:21:54.780876 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:54Z","lastTransitionTime":"2025-09-30T12:21:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 12:21:54 crc kubenswrapper[5002]: E0930 12:21:54.793490 5002 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T12:21:54Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:54Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T12:21:54Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:54Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T12:21:54Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:54Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T12:21:54Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:54Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"2268e3e5-9437-4733-80ec-6085372d1c27\\\",\\\"systemUUID\\\":\\\"2730bf9d-d559-45ca-96f1-192133954467\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:54Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:54 crc kubenswrapper[5002]: I0930 12:21:54.798290 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:54 crc kubenswrapper[5002]: I0930 12:21:54.798356 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 30 12:21:54 crc kubenswrapper[5002]: I0930 12:21:54.798369 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:54 crc kubenswrapper[5002]: I0930 12:21:54.798383 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:54 crc kubenswrapper[5002]: I0930 12:21:54.798394 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:54Z","lastTransitionTime":"2025-09-30T12:21:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:54 crc kubenswrapper[5002]: E0930 12:21:54.808842 5002 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T12:21:54Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:54Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T12:21:54Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:54Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T12:21:54Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:54Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T12:21:54Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:54Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"2268e3e5-9437-4733-80ec-6085372d1c27\\\",\\\"systemUUID\\\":\\\"2730bf9d-d559-45ca-96f1-192133954467\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:54Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:54 crc kubenswrapper[5002]: I0930 12:21:54.812172 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:54 crc kubenswrapper[5002]: I0930 12:21:54.812344 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 30 12:21:54 crc kubenswrapper[5002]: I0930 12:21:54.812550 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:54 crc kubenswrapper[5002]: I0930 12:21:54.812680 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:54 crc kubenswrapper[5002]: I0930 12:21:54.812935 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:54Z","lastTransitionTime":"2025-09-30T12:21:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:54 crc kubenswrapper[5002]: E0930 12:21:54.825168 5002 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T12:21:54Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:54Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T12:21:54Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:54Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T12:21:54Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:54Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T12:21:54Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:54Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"2268e3e5-9437-4733-80ec-6085372d1c27\\\",\\\"systemUUID\\\":\\\"2730bf9d-d559-45ca-96f1-192133954467\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:54Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:54 crc kubenswrapper[5002]: I0930 12:21:54.828839 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:54 crc kubenswrapper[5002]: I0930 12:21:54.828879 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 30 12:21:54 crc kubenswrapper[5002]: I0930 12:21:54.828888 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:54 crc kubenswrapper[5002]: I0930 12:21:54.828902 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:54 crc kubenswrapper[5002]: I0930 12:21:54.828911 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:54Z","lastTransitionTime":"2025-09-30T12:21:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:54 crc kubenswrapper[5002]: E0930 12:21:54.842825 5002 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T12:21:54Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:54Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T12:21:54Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:54Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T12:21:54Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:54Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T12:21:54Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:54Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"2268e3e5-9437-4733-80ec-6085372d1c27\\\",\\\"systemUUID\\\":\\\"2730bf9d-d559-45ca-96f1-192133954467\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:54Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:54 crc kubenswrapper[5002]: I0930 12:21:54.846141 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:54 crc kubenswrapper[5002]: I0930 12:21:54.846370 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 30 12:21:54 crc kubenswrapper[5002]: I0930 12:21:54.846542 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:54 crc kubenswrapper[5002]: I0930 12:21:54.846701 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:54 crc kubenswrapper[5002]: I0930 12:21:54.846853 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:54Z","lastTransitionTime":"2025-09-30T12:21:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:54 crc kubenswrapper[5002]: E0930 12:21:54.857895 5002 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T12:21:54Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:54Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T12:21:54Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:54Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T12:21:54Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:54Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T12:21:54Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:54Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"2268e3e5-9437-4733-80ec-6085372d1c27\\\",\\\"systemUUID\\\":\\\"2730bf9d-d559-45ca-96f1-192133954467\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:54Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:54 crc kubenswrapper[5002]: E0930 12:21:54.858060 5002 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Sep 30 12:21:54 crc kubenswrapper[5002]: I0930 12:21:54.859566 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Sep 30 12:21:54 crc kubenswrapper[5002]: I0930 12:21:54.859594 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:54 crc kubenswrapper[5002]: I0930 12:21:54.859603 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:54 crc kubenswrapper[5002]: I0930 12:21:54.859617 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:54 crc kubenswrapper[5002]: I0930 12:21:54.859625 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:54Z","lastTransitionTime":"2025-09-30T12:21:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:54 crc kubenswrapper[5002]: I0930 12:21:54.962713 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:54 crc kubenswrapper[5002]: I0930 12:21:54.962751 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:54 crc kubenswrapper[5002]: I0930 12:21:54.962762 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:54 crc kubenswrapper[5002]: I0930 12:21:54.962777 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:54 crc kubenswrapper[5002]: I0930 12:21:54.962788 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:54Z","lastTransitionTime":"2025-09-30T12:21:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:55 crc kubenswrapper[5002]: I0930 12:21:55.065833 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:55 crc kubenswrapper[5002]: I0930 12:21:55.066137 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:55 crc kubenswrapper[5002]: I0930 12:21:55.066271 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:55 crc kubenswrapper[5002]: I0930 12:21:55.066444 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:55 crc kubenswrapper[5002]: I0930 12:21:55.066627 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:55Z","lastTransitionTime":"2025-09-30T12:21:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 12:21:55 crc kubenswrapper[5002]: I0930 12:21:55.171230 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:55 crc kubenswrapper[5002]: I0930 12:21:55.171323 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:55 crc kubenswrapper[5002]: I0930 12:21:55.171352 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:55 crc kubenswrapper[5002]: I0930 12:21:55.171388 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:55 crc kubenswrapper[5002]: I0930 12:21:55.171425 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:55Z","lastTransitionTime":"2025-09-30T12:21:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:55 crc kubenswrapper[5002]: I0930 12:21:55.274338 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:55 crc kubenswrapper[5002]: I0930 12:21:55.275075 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:55 crc kubenswrapper[5002]: I0930 12:21:55.275404 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:55 crc kubenswrapper[5002]: I0930 12:21:55.275631 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:55 crc kubenswrapper[5002]: I0930 12:21:55.275808 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:55Z","lastTransitionTime":"2025-09-30T12:21:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:55 crc kubenswrapper[5002]: I0930 12:21:55.378517 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:55 crc kubenswrapper[5002]: I0930 12:21:55.378559 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:55 crc kubenswrapper[5002]: I0930 12:21:55.378576 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:55 crc kubenswrapper[5002]: I0930 12:21:55.378597 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:55 crc kubenswrapper[5002]: I0930 12:21:55.378614 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:55Z","lastTransitionTime":"2025-09-30T12:21:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 12:21:55 crc kubenswrapper[5002]: I0930 12:21:55.483170 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:55 crc kubenswrapper[5002]: I0930 12:21:55.483237 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:55 crc kubenswrapper[5002]: I0930 12:21:55.483254 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:55 crc kubenswrapper[5002]: I0930 12:21:55.483278 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:55 crc kubenswrapper[5002]: I0930 12:21:55.483295 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:55Z","lastTransitionTime":"2025-09-30T12:21:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:55 crc kubenswrapper[5002]: I0930 12:21:55.587386 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:55 crc kubenswrapper[5002]: I0930 12:21:55.587461 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:55 crc kubenswrapper[5002]: I0930 12:21:55.587524 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:55 crc kubenswrapper[5002]: I0930 12:21:55.587555 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:55 crc kubenswrapper[5002]: I0930 12:21:55.587578 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:55Z","lastTransitionTime":"2025-09-30T12:21:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:55 crc kubenswrapper[5002]: I0930 12:21:55.675952 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-dj2ln" Sep 30 12:21:55 crc kubenswrapper[5002]: E0930 12:21:55.676157 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-dj2ln" podUID="68756e8a-d882-403f-acd7-2c41fce4446f" Sep 30 12:21:55 crc kubenswrapper[5002]: I0930 12:21:55.690996 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:55 crc kubenswrapper[5002]: I0930 12:21:55.691411 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:55 crc kubenswrapper[5002]: I0930 12:21:55.691789 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:55 crc kubenswrapper[5002]: I0930 12:21:55.692062 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:55 crc kubenswrapper[5002]: I0930 12:21:55.692303 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:55Z","lastTransitionTime":"2025-09-30T12:21:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:55 crc kubenswrapper[5002]: I0930 12:21:55.796070 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:55 crc kubenswrapper[5002]: I0930 12:21:55.796304 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:55 crc kubenswrapper[5002]: I0930 12:21:55.796440 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:55 crc kubenswrapper[5002]: I0930 12:21:55.796602 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:55 crc kubenswrapper[5002]: I0930 12:21:55.796677 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:55Z","lastTransitionTime":"2025-09-30T12:21:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 12:21:55 crc kubenswrapper[5002]: I0930 12:21:55.900219 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:55 crc kubenswrapper[5002]: I0930 12:21:55.900708 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:55 crc kubenswrapper[5002]: I0930 12:21:55.900855 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:55 crc kubenswrapper[5002]: I0930 12:21:55.900941 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:55 crc kubenswrapper[5002]: I0930 12:21:55.901032 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:55Z","lastTransitionTime":"2025-09-30T12:21:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:56 crc kubenswrapper[5002]: I0930 12:21:56.004717 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:56 crc kubenswrapper[5002]: I0930 12:21:56.004801 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:56 crc kubenswrapper[5002]: I0930 12:21:56.004824 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:56 crc kubenswrapper[5002]: I0930 12:21:56.004853 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:56 crc kubenswrapper[5002]: I0930 12:21:56.004874 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:56Z","lastTransitionTime":"2025-09-30T12:21:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:56 crc kubenswrapper[5002]: I0930 12:21:56.108429 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:56 crc kubenswrapper[5002]: I0930 12:21:56.108567 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:56 crc kubenswrapper[5002]: I0930 12:21:56.108594 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:56 crc kubenswrapper[5002]: I0930 12:21:56.108683 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:56 crc kubenswrapper[5002]: I0930 12:21:56.109124 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:56Z","lastTransitionTime":"2025-09-30T12:21:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 12:21:56 crc kubenswrapper[5002]: I0930 12:21:56.212283 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:56 crc kubenswrapper[5002]: I0930 12:21:56.212761 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:56 crc kubenswrapper[5002]: I0930 12:21:56.213166 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:56 crc kubenswrapper[5002]: I0930 12:21:56.213410 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:56 crc kubenswrapper[5002]: I0930 12:21:56.213714 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:56Z","lastTransitionTime":"2025-09-30T12:21:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:56 crc kubenswrapper[5002]: I0930 12:21:56.316939 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:56 crc kubenswrapper[5002]: I0930 12:21:56.316996 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:56 crc kubenswrapper[5002]: I0930 12:21:56.317014 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:56 crc kubenswrapper[5002]: I0930 12:21:56.317039 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:56 crc kubenswrapper[5002]: I0930 12:21:56.317058 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:56Z","lastTransitionTime":"2025-09-30T12:21:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:56 crc kubenswrapper[5002]: I0930 12:21:56.420463 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:56 crc kubenswrapper[5002]: I0930 12:21:56.420585 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:56 crc kubenswrapper[5002]: I0930 12:21:56.420608 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:56 crc kubenswrapper[5002]: I0930 12:21:56.420635 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:56 crc kubenswrapper[5002]: I0930 12:21:56.420653 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:56Z","lastTransitionTime":"2025-09-30T12:21:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 12:21:56 crc kubenswrapper[5002]: I0930 12:21:56.523949 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:56 crc kubenswrapper[5002]: I0930 12:21:56.524002 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:56 crc kubenswrapper[5002]: I0930 12:21:56.524020 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:56 crc kubenswrapper[5002]: I0930 12:21:56.524045 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:56 crc kubenswrapper[5002]: I0930 12:21:56.524062 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:56Z","lastTransitionTime":"2025-09-30T12:21:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:56 crc kubenswrapper[5002]: I0930 12:21:56.627519 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:56 crc kubenswrapper[5002]: I0930 12:21:56.627580 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:56 crc kubenswrapper[5002]: I0930 12:21:56.627604 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:56 crc kubenswrapper[5002]: I0930 12:21:56.627632 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:56 crc kubenswrapper[5002]: I0930 12:21:56.627653 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:56Z","lastTransitionTime":"2025-09-30T12:21:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:56 crc kubenswrapper[5002]: I0930 12:21:56.675859 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 12:21:56 crc kubenswrapper[5002]: I0930 12:21:56.675941 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 12:21:56 crc kubenswrapper[5002]: E0930 12:21:56.676069 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 12:21:56 crc kubenswrapper[5002]: E0930 12:21:56.676197 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 12:21:56 crc kubenswrapper[5002]: I0930 12:21:56.676725 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 12:21:56 crc kubenswrapper[5002]: E0930 12:21:56.676872 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 12:21:56 crc kubenswrapper[5002]: I0930 12:21:56.697797 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1fe2fccc-790d-41b7-a322-0f99dbd9b3e2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a46e01897629261ad42fc3fa2cc7fdf56a2a020864a1088d9e58edf61aac296e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://017ebbf00a59d33f36e3515dd280bc0c4363da8b3e621339185ebdb3c8728148\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-
cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ee0dc371a43f3780e1178e29ac491c7a61786fccb37527dd2f21d800adbbdae\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ba33674850f0b121ea405e37924fcc085b3a3625e87432d9df38b2420920bf45\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://49c139b2c0ae558d108d47a8452c2a8348fc164761d468e1b9b98ff2c7407f20\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"message\\\":\\\"le observer\\\\nW0930 12:20:46.369838 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0930 12:20:46.370037 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 12:20:46.371113 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1995022772/tls.crt::/tmp/serving-cert-1995022772/tls.key\\\\\\\"\\\\nI0930 12:20:46.549991 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0930 12:20:46.558658 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0930 12:20:46.558692 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0930 12:20:46.558723 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0930 12:20:46.558735 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0930 12:20:46.572632 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0930 12:20:46.572689 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 12:20:46.572707 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 12:20:46.572718 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0930 12:20:46.572725 1 secure_serving.go:69] Use of 
insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0930 12:20:46.572737 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0930 12:20:46.572744 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0930 12:20:46.573045 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0930 12:20:46.574462 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:40Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:21:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b99e73bb34b117cdb12e2475fc8e7d0ffdcb7fc44a000282e7776473eca209ea\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://24b1bc36a80f3925846e76f49c3347f1e2f14555096b3a514b06c6eb2e8fe02c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://24b1bc36a80f3925846e76f49c3347f1e2f14555096b3a514b06c6eb2e8fe02c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:56Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:56 crc kubenswrapper[5002]: I0930 12:21:56.711342 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://28d9568103a5257388af7a54e676246ab6ee732abd4516bd06e2cd7471f0ec08\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:56Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:56 crc kubenswrapper[5002]: I0930 12:21:56.724560 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97f2c4d225c4e4f2f04337915618c52ced7280fe45dc735fd9b1aca03f95f7f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:56Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:56 crc kubenswrapper[5002]: I0930 12:21:56.734227 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:56 crc kubenswrapper[5002]: I0930 12:21:56.734289 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:56 crc kubenswrapper[5002]: I0930 12:21:56.734308 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:56 crc kubenswrapper[5002]: I0930 12:21:56.734334 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:56 crc kubenswrapper[5002]: I0930 12:21:56.734353 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:56Z","lastTransitionTime":"2025-09-30T12:21:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 12:21:56 crc kubenswrapper[5002]: I0930 12:21:56.738026 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:56Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:56 crc kubenswrapper[5002]: I0930 12:21:56.749438 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hvvbg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c3b6a7b0-1c44-43ee-a789-36bb0cd51b87\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cb569d449d2d8d49ee8d62c4815b625481cd5ddd882a135dac7465fe63fbc67e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:21:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ffrqq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4537e7bf982dba689bd81821609884ac5f2530e6efa7d48cd65e05ff64d2ec6b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:21:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ffrqq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:21:02Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-hvvbg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:56Z is after 2025-08-24T17:21:41Z" Sep 30 
12:21:56 crc kubenswrapper[5002]: I0930 12:21:56.769258 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1d4eefd-8292-45a1-af4b-4a4906cccd2f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21a70de7ac636795f52b3bdd2a2b938494cabcf80e1fb9b321a309f4719edf4e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://240fb1d0e11ae7b3459121ba32094ddfa79dc71a1180befebcaf594859a63d86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://91de7b5c3b52d03b05c151b66857b5af5f3b9d228f3f1af32a7f504225e1e739\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"lo
g-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4cb1aca4e313653c2d83c0d4fb8dfab1e6f8c8220c806cbcdcf154f2d9ab6e29\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ef59c1ec617a61effd91da12779f13866a50fe611d078330f0275e8d6ef1c27e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3a814062d9554255b768b26ad452946c64836707ac0e68c9e3f107dc587c0ccb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3a814062d9554255b768b26ad452946c64836707ac0e68c9e3f107dc587c0ccb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://74db2c969d2f9049fd8e8130a9fd50b312f3fce049ab0e814390dbff3b0cb596\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://74db2c969d2f9049fd8e8130a9fd50b312f3fce049ab0e814390dbff3b0cb596\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"reas
on\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://a367821c9b4b8799fb789ca39d487c519925da11ebb80e94a0123e02cf78964e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a367821c9b4b8799fb789ca39d487c519925da11ebb80e94a0123e02cf78964e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:26Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:56Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:56 crc kubenswrapper[5002]: I0930 12:21:56.780814 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-ls6n9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2ebe21b2-eb85-4b30-b911-78f6619d07f9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e83e79da4b7b8f89a61048c7583c9ff7ccb7ac910744dfbb572c189c5676741\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-frsmm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:52Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-ls6n9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:56Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:56 crc kubenswrapper[5002]: I0930 12:21:56.793891 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"90d5183f-90e1-421f-bd64-fd7f05605586\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d2e0f074c0f31a5432cea24faa4f95c276f59cdf48bca8ceecd065b4cdff5d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://902cc950dc462d86c9de62cac5d04e99aa3240952f43e8a5f07884e70ff84b56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://047c48f1e175cddc7dc981a8d9027783f4a90c21cb444050771de50fb02e7a9e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f45ac789dc45e907d68fe54ec7bc2ea72dbcc5f46cd55c113c014203d5e709d9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:56Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:56 crc kubenswrapper[5002]: I0930 12:21:56.805363 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:56Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:56 crc kubenswrapper[5002]: I0930 12:21:56.818270 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-ttfn8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2bd9f18d-bfb3-4bd7-9a87-242029cd3200\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4cf7d940b7333c33f9092bf1a630d298ae51fee0947c17ef09d902ae5dc1103c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0809a587f4b73c4a5e632e8db2cc0d4a957eded6da9a771cf0b53e4862de54e5\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T12:21:38Z\\\",\\\"message\\\":\\\"2025-09-30T12:20:52+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_59b1707c-38a6-4f61-b1a0-65dcba4fefc7\\\\n2025-09-30T12:20:52+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_59b1707c-38a6-4f61-b1a0-65dcba4fefc7 to /host/opt/cni/bin/\\\\n2025-09-30T12:20:53Z [verbose] multus-daemon started\\\\n2025-09-30T12:20:53Z [verbose] Readiness Indicator file check\\\\n2025-09-30T12:21:38Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:21:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lvf2n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-ttfn8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:56Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:56 crc kubenswrapper[5002]: I0930 12:21:56.830206 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"341a55c6-78d3-4fa2-8f47-b56fd41fa1c1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7bc40360d8d17f580c7ded1e33762aa443998e4202892e2f2effc08b6659a4a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbwzb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6868404a7219f8dbbc4e7541e5f80402c169808daea81b5f94546472cb8e70a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbwzb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:50Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-ncbb5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:56Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:56 crc kubenswrapper[5002]: I0930 12:21:56.837636 5002 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:56 crc kubenswrapper[5002]: I0930 12:21:56.837711 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:56 crc kubenswrapper[5002]: I0930 12:21:56.837729 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:56 crc kubenswrapper[5002]: I0930 12:21:56.837752 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:56 crc kubenswrapper[5002]: I0930 12:21:56.837767 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:56Z","lastTransitionTime":"2025-09-30T12:21:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:56 crc kubenswrapper[5002]: I0930 12:21:56.856229 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4pvsr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7095aa7a-d067-4977-bdc5-3a45a52a6a39\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b95267743f525eb5c8712304a1dc7afe4403b2065c0a3b594d95dcba6348170\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://89dbc048c1483376e11a6754384e9ea154767a88f7dfb3463809a2554bb142b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d41b04f928f3b4423bf4aa1babfd6292b749752d31ac57771b36bed503312d3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ceb946c94ca247c05c57001754a73122b1bb98a98d886df9d64b2e4a003a4cf6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfbe7b62a4f0a00896ac4608d70c4ef6dc7675e0312f85ca181fcf1087fe73e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a65c3c1af3f1ee07c2f5350011d1f5eb61dfe0b21ff13d7326373d65ffe603c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a9a8ff35878c8516cf03a6425a099adbb9e94d1
feae510064c59f89b051fcb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9a9a8ff35878c8516cf03a6425a099adbb9e94d1feae510064c59f89b051fcb5\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T12:21:44Z\\\",\\\"message\\\":\\\"Ds:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-machine-api/machine-api-operator-machine-webhook\\\\\\\"}, Opts:services.LBOpts{Reject:false, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}, built lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-machine-api/machine-api-operator-machine-webhook_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-machine-api/machine-api-operator-machine-webhook\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.4.250\\\\\\\", Port:443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI0930 12:21:44.676107 7084 obj_retry.go:420] Function iterateRetryResources for *v1.Pod ended (in 165.305µs)\\\\nF0930 12:21:44.676014 7084 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T12:21:43Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller 
pod=ovnkube-node-4pvsr_openshift-ovn-kubernetes(7095aa7a-d067-4977-bdc5-3a45a52a6a39)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ce6227a7ac3ced1fbd4814039859e25ff52f314aa8479275f1bfc49b04cf411\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29ad21e0858ff157cc785f60f949fb08082938850870cc2c5198447f1e1d9253\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://29ad21e0858ff157cc785f60f949fb08082938850870cc2c5198447f1e1d9253\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bntn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:50Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4pvsr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:56Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:56 crc kubenswrapper[5002]: I0930 12:21:56.868785 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:47Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://30f55f9a0bcab13420179c62f63cd5e785afe964ac86286450fcd91d7ca0562e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://306dd7840789c5fe2565d819a744a57e330cb1fcc1ab1ca4b3c5ebcfd0361d3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17
b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:56Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:56 crc kubenswrapper[5002]: I0930 12:21:56.878922 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"98317299-6ce9-4196-a387-becb81461d6a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6057b9d238b4d4dd1329f0ffdbfd88f2f8869c06e5eb681041229a15dd4a08dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://96f9b5d0a287ad67a6f1008d06b0d371a522722280353ed9001f81198b9295ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cer
t-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd0dba52ee6bddc02187198fe6c409abf3d336df93b558b27fc7f76e7ec8f2b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://84c3302e7daef1722f84b63d532dabe8e9f8cf5679478df9b92b73273b3a3000\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://84c3302e7daef1722f84b63d532dabe8e9f8cf5679478df9b92b73273b3a3000\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:27Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:26Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:56Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:56 crc kubenswrapper[5002]: I0930 12:21:56.892358 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:46Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:56Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:56 crc kubenswrapper[5002]: I0930 12:21:56.908257 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-p45pn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d0334cb8-cf38-4e50-80e8-2b81d8d46ae7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca035599120eed38b0abc1893076e4d1ecab6b1ce53605f53ff30fa2782b63de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2nvm4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:49Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-p45pn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:56Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:56 crc kubenswrapper[5002]: I0930 12:21:56.926205 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-2lqq2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7b7df259-4156-484c-bedd-543ca42f2970\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3e10fed923eacb0c5409835ec689ec74b4ef68b770774b3fe03e05db05d63c04\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6865c45580d6dc6acaf6dede9483213728b45f602a6df3a6d9264bbec520db66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6865c45580d6dc6acaf6dede9483213728b45f602a6df3a6d9264bbec520db66\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://042d2758098735539d73ee03bcaf5bfaaa765ef2337975341c63511bff33e4ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://042d2758098735539d73ee03bcaf5bfaaa765ef2337975341c63511bff33e4ec\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://037e6837e485e2662c487cea7b28bd28b85c3aa2dc698edd3d16b4269907dd83\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://037e6837e485e2662c487cea7b28bd28b85c3aa2dc698edd3d16b4269907dd83\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2a451f8cf7baf508ad6ceda7f19f3117804e5698e678bedf8e4187f60847aaac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2a451f8cf7baf508ad6ceda7f19f3117804e5698e678bedf8e4187f60847aaac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://edf6916be3680c65376e28fb893a869599eb07d870ac5337a38fed83b4dbf28d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://edf6916be3680c65376e28fb893a869599eb07d870ac5337a38fed83b4dbf28d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7019df241ab90e98201c474287f02919d99bca60c432a8f61ac68d29ce53bed8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7019df241ab90e98201c474287f02919d99bca60c432a8f61ac68d29ce53bed8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bsjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-2lqq2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:56Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:56 crc kubenswrapper[5002]: I0930 12:21:56.940640 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:56 crc kubenswrapper[5002]: I0930 12:21:56.941720 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:56 crc 
kubenswrapper[5002]: I0930 12:21:56.941823 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:56 crc kubenswrapper[5002]: I0930 12:21:56.942093 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:56 crc kubenswrapper[5002]: I0930 12:21:56.942174 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:56Z","lastTransitionTime":"2025-09-30T12:21:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:56 crc kubenswrapper[5002]: I0930 12:21:56.942041 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-dj2ln" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"68756e8a-d882-403f-acd7-2c41fce4446f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:21:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jdgr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jdgr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:21:04Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-dj2ln\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:56Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:56 crc kubenswrapper[5002]: I0930 12:21:56.958147 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"be2f35cf-d5f8-40aa-ae79-11e075053735\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T12:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b282a6a59ec67140cc73df77f9505388fb4ff97dfdedf8394eeb46d65a2bda8a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T12:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1f6f78afdbf3b3bd7c28515e9ff9ad08cdccd4f4f44106f8aa5375638b3e8ec9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1f6f78afdbf3b3bd7c28515e9ff9ad08cdccd4f4f44106f8aa5375638b3e8ec9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T12:20:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T12:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T12:20:26Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T12:21:56Z is after 2025-08-24T17:21:41Z" Sep 30 12:21:57 crc kubenswrapper[5002]: I0930 12:21:57.044842 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:57 crc kubenswrapper[5002]: I0930 12:21:57.044900 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 30 12:21:57 crc kubenswrapper[5002]: I0930 12:21:57.044915 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:57 crc kubenswrapper[5002]: I0930 12:21:57.044937 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:57 crc kubenswrapper[5002]: I0930 12:21:57.044954 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:57Z","lastTransitionTime":"2025-09-30T12:21:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:57 crc kubenswrapper[5002]: I0930 12:21:57.148382 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:57 crc kubenswrapper[5002]: I0930 12:21:57.148703 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:57 crc kubenswrapper[5002]: I0930 12:21:57.148801 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:57 crc kubenswrapper[5002]: I0930 12:21:57.148889 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:57 crc kubenswrapper[5002]: I0930 12:21:57.148997 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:57Z","lastTransitionTime":"2025-09-30T12:21:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:57 crc kubenswrapper[5002]: I0930 12:21:57.252557 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:57 crc kubenswrapper[5002]: I0930 12:21:57.252616 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:57 crc kubenswrapper[5002]: I0930 12:21:57.252633 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:57 crc kubenswrapper[5002]: I0930 12:21:57.252657 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:57 crc kubenswrapper[5002]: I0930 12:21:57.252676 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:57Z","lastTransitionTime":"2025-09-30T12:21:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 12:21:57 crc kubenswrapper[5002]: I0930 12:21:57.362912 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:57 crc kubenswrapper[5002]: I0930 12:21:57.362961 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:57 crc kubenswrapper[5002]: I0930 12:21:57.362977 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:57 crc kubenswrapper[5002]: I0930 12:21:57.363000 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:57 crc kubenswrapper[5002]: I0930 12:21:57.363015 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:57Z","lastTransitionTime":"2025-09-30T12:21:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:57 crc kubenswrapper[5002]: I0930 12:21:57.465965 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:57 crc kubenswrapper[5002]: I0930 12:21:57.466012 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:57 crc kubenswrapper[5002]: I0930 12:21:57.466021 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:57 crc kubenswrapper[5002]: I0930 12:21:57.466037 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:57 crc kubenswrapper[5002]: I0930 12:21:57.466047 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:57Z","lastTransitionTime":"2025-09-30T12:21:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:57 crc kubenswrapper[5002]: I0930 12:21:57.567664 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:57 crc kubenswrapper[5002]: I0930 12:21:57.567742 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:57 crc kubenswrapper[5002]: I0930 12:21:57.567760 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:57 crc kubenswrapper[5002]: I0930 12:21:57.567787 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:57 crc kubenswrapper[5002]: I0930 12:21:57.567806 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:57Z","lastTransitionTime":"2025-09-30T12:21:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 12:21:57 crc kubenswrapper[5002]: I0930 12:21:57.670288 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:57 crc kubenswrapper[5002]: I0930 12:21:57.670364 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:57 crc kubenswrapper[5002]: I0930 12:21:57.670376 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:57 crc kubenswrapper[5002]: I0930 12:21:57.670392 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:57 crc kubenswrapper[5002]: I0930 12:21:57.670405 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:57Z","lastTransitionTime":"2025-09-30T12:21:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:57 crc kubenswrapper[5002]: I0930 12:21:57.675769 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-dj2ln" Sep 30 12:21:57 crc kubenswrapper[5002]: E0930 12:21:57.676201 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-dj2ln" podUID="68756e8a-d882-403f-acd7-2c41fce4446f" Sep 30 12:21:57 crc kubenswrapper[5002]: I0930 12:21:57.773317 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:57 crc kubenswrapper[5002]: I0930 12:21:57.773769 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:57 crc kubenswrapper[5002]: I0930 12:21:57.773788 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:57 crc kubenswrapper[5002]: I0930 12:21:57.773816 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:57 crc kubenswrapper[5002]: I0930 12:21:57.773833 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:57Z","lastTransitionTime":"2025-09-30T12:21:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 12:21:57 crc kubenswrapper[5002]: I0930 12:21:57.877776 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:57 crc kubenswrapper[5002]: I0930 12:21:57.877833 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:57 crc kubenswrapper[5002]: I0930 12:21:57.877847 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:57 crc kubenswrapper[5002]: I0930 12:21:57.877865 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:57 crc kubenswrapper[5002]: I0930 12:21:57.877878 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:57Z","lastTransitionTime":"2025-09-30T12:21:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:57 crc kubenswrapper[5002]: I0930 12:21:57.980813 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:57 crc kubenswrapper[5002]: I0930 12:21:57.980872 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:57 crc kubenswrapper[5002]: I0930 12:21:57.980933 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:57 crc kubenswrapper[5002]: I0930 12:21:57.980955 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:57 crc kubenswrapper[5002]: I0930 12:21:57.980969 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:57Z","lastTransitionTime":"2025-09-30T12:21:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:58 crc kubenswrapper[5002]: I0930 12:21:58.083381 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:58 crc kubenswrapper[5002]: I0930 12:21:58.083427 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:58 crc kubenswrapper[5002]: I0930 12:21:58.083439 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:58 crc kubenswrapper[5002]: I0930 12:21:58.083454 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:58 crc kubenswrapper[5002]: I0930 12:21:58.083466 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:58Z","lastTransitionTime":"2025-09-30T12:21:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 12:21:58 crc kubenswrapper[5002]: I0930 12:21:58.187158 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:58 crc kubenswrapper[5002]: I0930 12:21:58.187346 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:58 crc kubenswrapper[5002]: I0930 12:21:58.187372 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:58 crc kubenswrapper[5002]: I0930 12:21:58.187395 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:58 crc kubenswrapper[5002]: I0930 12:21:58.187412 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:58Z","lastTransitionTime":"2025-09-30T12:21:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:58 crc kubenswrapper[5002]: I0930 12:21:58.289732 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:58 crc kubenswrapper[5002]: I0930 12:21:58.289789 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:58 crc kubenswrapper[5002]: I0930 12:21:58.289806 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:58 crc kubenswrapper[5002]: I0930 12:21:58.289829 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:58 crc kubenswrapper[5002]: I0930 12:21:58.289846 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:58Z","lastTransitionTime":"2025-09-30T12:21:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:58 crc kubenswrapper[5002]: I0930 12:21:58.392463 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:58 crc kubenswrapper[5002]: I0930 12:21:58.392572 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:58 crc kubenswrapper[5002]: I0930 12:21:58.392596 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:58 crc kubenswrapper[5002]: I0930 12:21:58.392626 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:58 crc kubenswrapper[5002]: I0930 12:21:58.392647 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:58Z","lastTransitionTime":"2025-09-30T12:21:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 12:21:58 crc kubenswrapper[5002]: I0930 12:21:58.495828 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:58 crc kubenswrapper[5002]: I0930 12:21:58.495911 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:58 crc kubenswrapper[5002]: I0930 12:21:58.495929 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:58 crc kubenswrapper[5002]: I0930 12:21:58.495952 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:58 crc kubenswrapper[5002]: I0930 12:21:58.495965 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:58Z","lastTransitionTime":"2025-09-30T12:21:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:58 crc kubenswrapper[5002]: I0930 12:21:58.600757 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:58 crc kubenswrapper[5002]: I0930 12:21:58.600845 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:58 crc kubenswrapper[5002]: I0930 12:21:58.600871 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:58 crc kubenswrapper[5002]: I0930 12:21:58.600907 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:58 crc kubenswrapper[5002]: I0930 12:21:58.600947 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:58Z","lastTransitionTime":"2025-09-30T12:21:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:58 crc kubenswrapper[5002]: I0930 12:21:58.675543 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 12:21:58 crc kubenswrapper[5002]: I0930 12:21:58.675606 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 12:21:58 crc kubenswrapper[5002]: E0930 12:21:58.676040 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 12:21:58 crc kubenswrapper[5002]: E0930 12:21:58.676178 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 12:21:58 crc kubenswrapper[5002]: I0930 12:21:58.676411 5002 scope.go:117] "RemoveContainer" containerID="9a9a8ff35878c8516cf03a6425a099adbb9e94d1feae510064c59f89b051fcb5" Sep 30 12:21:58 crc kubenswrapper[5002]: E0930 12:21:58.676620 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-4pvsr_openshift-ovn-kubernetes(7095aa7a-d067-4977-bdc5-3a45a52a6a39)\"" pod="openshift-ovn-kubernetes/ovnkube-node-4pvsr" podUID="7095aa7a-d067-4977-bdc5-3a45a52a6a39" Sep 30 12:21:58 crc kubenswrapper[5002]: I0930 12:21:58.676732 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 12:21:58 crc kubenswrapper[5002]: E0930 12:21:58.676801 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 12:21:58 crc kubenswrapper[5002]: I0930 12:21:58.703577 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:58 crc kubenswrapper[5002]: I0930 12:21:58.703642 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:58 crc kubenswrapper[5002]: I0930 12:21:58.703659 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:58 crc kubenswrapper[5002]: I0930 12:21:58.703682 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:58 crc kubenswrapper[5002]: I0930 12:21:58.703701 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:58Z","lastTransitionTime":"2025-09-30T12:21:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 12:21:58 crc kubenswrapper[5002]: I0930 12:21:58.807425 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:58 crc kubenswrapper[5002]: I0930 12:21:58.807489 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:58 crc kubenswrapper[5002]: I0930 12:21:58.807501 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:58 crc kubenswrapper[5002]: I0930 12:21:58.807522 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:58 crc kubenswrapper[5002]: I0930 12:21:58.807530 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:58Z","lastTransitionTime":"2025-09-30T12:21:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:58 crc kubenswrapper[5002]: I0930 12:21:58.911087 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:58 crc kubenswrapper[5002]: I0930 12:21:58.911216 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:58 crc kubenswrapper[5002]: I0930 12:21:58.911241 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:58 crc kubenswrapper[5002]: I0930 12:21:58.911272 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:58 crc kubenswrapper[5002]: I0930 12:21:58.911294 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:58Z","lastTransitionTime":"2025-09-30T12:21:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:59 crc kubenswrapper[5002]: I0930 12:21:59.013841 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:59 crc kubenswrapper[5002]: I0930 12:21:59.013936 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:59 crc kubenswrapper[5002]: I0930 12:21:59.013974 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:59 crc kubenswrapper[5002]: I0930 12:21:59.014005 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:59 crc kubenswrapper[5002]: I0930 12:21:59.014027 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:59Z","lastTransitionTime":"2025-09-30T12:21:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 12:21:59 crc kubenswrapper[5002]: I0930 12:21:59.117915 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:59 crc kubenswrapper[5002]: I0930 12:21:59.117984 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:59 crc kubenswrapper[5002]: I0930 12:21:59.118002 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:59 crc kubenswrapper[5002]: I0930 12:21:59.118026 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:59 crc kubenswrapper[5002]: I0930 12:21:59.118042 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:59Z","lastTransitionTime":"2025-09-30T12:21:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:59 crc kubenswrapper[5002]: I0930 12:21:59.221039 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:59 crc kubenswrapper[5002]: I0930 12:21:59.221098 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:59 crc kubenswrapper[5002]: I0930 12:21:59.221127 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:59 crc kubenswrapper[5002]: I0930 12:21:59.221169 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:59 crc kubenswrapper[5002]: I0930 12:21:59.221264 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:59Z","lastTransitionTime":"2025-09-30T12:21:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:21:59 crc kubenswrapper[5002]: I0930 12:21:59.324510 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:21:59 crc kubenswrapper[5002]: I0930 12:21:59.324580 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:21:59 crc kubenswrapper[5002]: I0930 12:21:59.324594 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:21:59 crc kubenswrapper[5002]: I0930 12:21:59.324619 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:21:59 crc kubenswrapper[5002]: I0930 12:21:59.324633 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:59Z","lastTransitionTime":"2025-09-30T12:21:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Sep 30 12:21:59 crc kubenswrapper[5002]: I0930 12:21:59.427698 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 12:21:59 crc kubenswrapper[5002]: I0930 12:21:59.427760 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 12:21:59 crc kubenswrapper[5002]: I0930 12:21:59.427778 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 12:21:59 crc kubenswrapper[5002]: I0930 12:21:59.427803 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 12:21:59 crc kubenswrapper[5002]: I0930 12:21:59.427821 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:21:59Z","lastTransitionTime":"2025-09-30T12:21:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 12:21:59 crc kubenswrapper[5002]: I0930 12:21:59.675858 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-dj2ln"
Sep 30 12:21:59 crc kubenswrapper[5002]: E0930 12:21:59.676107 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-dj2ln" podUID="68756e8a-d882-403f-acd7-2c41fce4446f"
Sep 30 12:22:00 crc kubenswrapper[5002]: I0930 12:22:00.675597 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Sep 30 12:22:00 crc kubenswrapper[5002]: E0930 12:22:00.675697 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Sep 30 12:22:00 crc kubenswrapper[5002]: I0930 12:22:00.675766 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 30 12:22:00 crc kubenswrapper[5002]: E0930 12:22:00.675893 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Sep 30 12:22:00 crc kubenswrapper[5002]: I0930 12:22:00.675904 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 30 12:22:00 crc kubenswrapper[5002]: E0930 12:22:00.676000 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Sep 30 12:22:01 crc kubenswrapper[5002]: I0930 12:22:01.675909 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-dj2ln"
Sep 30 12:22:01 crc kubenswrapper[5002]: E0930 12:22:01.676082 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-dj2ln" podUID="68756e8a-d882-403f-acd7-2c41fce4446f"
Sep 30 12:22:02 crc kubenswrapper[5002]: I0930 12:22:02.675543 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Sep 30 12:22:02 crc kubenswrapper[5002]: I0930 12:22:02.675585 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 30 12:22:02 crc kubenswrapper[5002]: E0930 12:22:02.675742 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Sep 30 12:22:02 crc kubenswrapper[5002]: I0930 12:22:02.675785 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 30 12:22:02 crc kubenswrapper[5002]: E0930 12:22:02.676048 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Sep 30 12:22:02 crc kubenswrapper[5002]: E0930 12:22:02.676169 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Sep 30 12:22:03 crc kubenswrapper[5002]: I0930 12:22:03.675694 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-dj2ln"
Sep 30 12:22:03 crc kubenswrapper[5002]: E0930 12:22:03.675867 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-dj2ln" podUID="68756e8a-d882-403f-acd7-2c41fce4446f"
Sep 30 12:22:04 crc kubenswrapper[5002]: I0930 12:22:04.675545 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 30 12:22:04 crc kubenswrapper[5002]: I0930 12:22:04.675598 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Sep 30 12:22:04 crc kubenswrapper[5002]: I0930 12:22:04.675681 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 30 12:22:04 crc kubenswrapper[5002]: E0930 12:22:04.675820 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Sep 30 12:22:04 crc kubenswrapper[5002]: E0930 12:22:04.676035 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Sep 30 12:22:04 crc kubenswrapper[5002]: E0930 12:22:04.676121 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Sep 30 12:22:04 crc kubenswrapper[5002]: I0930 12:22:04.821594 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:22:04Z","lastTransitionTime":"2025-09-30T12:22:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/.
Has your network provider started?"} Sep 30 12:22:04 crc kubenswrapper[5002]: I0930 12:22:04.924045 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:22:04 crc kubenswrapper[5002]: I0930 12:22:04.924109 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:22:04 crc kubenswrapper[5002]: I0930 12:22:04.924128 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:22:04 crc kubenswrapper[5002]: I0930 12:22:04.924155 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:22:04 crc kubenswrapper[5002]: I0930 12:22:04.924173 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:22:04Z","lastTransitionTime":"2025-09-30T12:22:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:22:05 crc kubenswrapper[5002]: I0930 12:22:05.026564 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:22:05 crc kubenswrapper[5002]: I0930 12:22:05.026636 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:22:05 crc kubenswrapper[5002]: I0930 12:22:05.026657 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:22:05 crc kubenswrapper[5002]: I0930 12:22:05.026682 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:22:05 crc kubenswrapper[5002]: I0930 12:22:05.026700 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:22:05Z","lastTransitionTime":"2025-09-30T12:22:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:22:05 crc kubenswrapper[5002]: I0930 12:22:05.129964 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:22:05 crc kubenswrapper[5002]: I0930 12:22:05.130019 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:22:05 crc kubenswrapper[5002]: I0930 12:22:05.130037 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:22:05 crc kubenswrapper[5002]: I0930 12:22:05.130062 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:22:05 crc kubenswrapper[5002]: I0930 12:22:05.130078 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:22:05Z","lastTransitionTime":"2025-09-30T12:22:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 12:22:05 crc kubenswrapper[5002]: I0930 12:22:05.233346 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:22:05 crc kubenswrapper[5002]: I0930 12:22:05.233411 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:22:05 crc kubenswrapper[5002]: I0930 12:22:05.233431 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:22:05 crc kubenswrapper[5002]: I0930 12:22:05.233455 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:22:05 crc kubenswrapper[5002]: I0930 12:22:05.233505 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:22:05Z","lastTransitionTime":"2025-09-30T12:22:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:22:05 crc kubenswrapper[5002]: I0930 12:22:05.235630 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 12:22:05 crc kubenswrapper[5002]: I0930 12:22:05.235677 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 12:22:05 crc kubenswrapper[5002]: I0930 12:22:05.235696 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 12:22:05 crc kubenswrapper[5002]: I0930 12:22:05.235716 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 12:22:05 crc kubenswrapper[5002]: I0930 12:22:05.235735 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T12:22:05Z","lastTransitionTime":"2025-09-30T12:22:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 12:22:05 crc kubenswrapper[5002]: I0930 12:22:05.304154 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-version/cluster-version-operator-5c965bbfc6-ptww2"] Sep 30 12:22:05 crc kubenswrapper[5002]: I0930 12:22:05.304676 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-ptww2" Sep 30 12:22:05 crc kubenswrapper[5002]: I0930 12:22:05.308306 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert" Sep 30 12:22:05 crc kubenswrapper[5002]: I0930 12:22:05.308919 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4" Sep 30 12:22:05 crc kubenswrapper[5002]: I0930 12:22:05.308937 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt" Sep 30 12:22:05 crc kubenswrapper[5002]: I0930 12:22:05.311712 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt" Sep 30 12:22:05 crc kubenswrapper[5002]: I0930 12:22:05.353231 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/146ae66a-f672-4c96-b523-6352170084fc-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-ptww2\" (UID: \"146ae66a-f672-4c96-b523-6352170084fc\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-ptww2" Sep 30 12:22:05 crc kubenswrapper[5002]: I0930 12:22:05.353791 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/146ae66a-f672-4c96-b523-6352170084fc-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-ptww2\" (UID: \"146ae66a-f672-4c96-b523-6352170084fc\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-ptww2" Sep 30 12:22:05 crc kubenswrapper[5002]: I0930 12:22:05.354143 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/146ae66a-f672-4c96-b523-6352170084fc-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-ptww2\" (UID: \"146ae66a-f672-4c96-b523-6352170084fc\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-ptww2" Sep 30 12:22:05 crc kubenswrapper[5002]: I0930 12:22:05.354559 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/146ae66a-f672-4c96-b523-6352170084fc-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-ptww2\" (UID: \"146ae66a-f672-4c96-b523-6352170084fc\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-ptww2" Sep 30 12:22:05 crc kubenswrapper[5002]: I0930 12:22:05.356711 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/146ae66a-f672-4c96-b523-6352170084fc-service-ca\") pod \"cluster-version-operator-5c965bbfc6-ptww2\" (UID: \"146ae66a-f672-4c96-b523-6352170084fc\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-ptww2" Sep 30 12:22:05 crc kubenswrapper[5002]: I0930 12:22:05.394466 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hvvbg" podStartSLOduration=75.39444981 podStartE2EDuration="1m15.39444981s" podCreationTimestamp="2025-09-30 12:20:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" 
observedRunningTime="2025-09-30 12:22:05.364613414 +0000 UTC m=+99.614295640" watchObservedRunningTime="2025-09-30 12:22:05.39444981 +0000 UTC m=+99.644131956" Sep 30 12:22:05 crc kubenswrapper[5002]: I0930 12:22:05.415443 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd/etcd-crc" podStartSLOduration=77.415425599 podStartE2EDuration="1m17.415425599s" podCreationTimestamp="2025-09-30 12:20:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 12:22:05.395285804 +0000 UTC m=+99.644967990" watchObservedRunningTime="2025-09-30 12:22:05.415425599 +0000 UTC m=+99.665107745" Sep 30 12:22:05 crc kubenswrapper[5002]: I0930 12:22:05.445725 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=79.445706219 podStartE2EDuration="1m19.445706219s" podCreationTimestamp="2025-09-30 12:20:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 12:22:05.420257525 +0000 UTC m=+99.669939681" watchObservedRunningTime="2025-09-30 12:22:05.445706219 +0000 UTC m=+99.695388355" Sep 30 12:22:05 crc kubenswrapper[5002]: I0930 12:22:05.457337 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/146ae66a-f672-4c96-b523-6352170084fc-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-ptww2\" (UID: \"146ae66a-f672-4c96-b523-6352170084fc\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-ptww2" Sep 30 12:22:05 crc kubenswrapper[5002]: I0930 12:22:05.457376 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/146ae66a-f672-4c96-b523-6352170084fc-service-ca\") pod \"cluster-version-operator-5c965bbfc6-ptww2\" (UID: \"146ae66a-f672-4c96-b523-6352170084fc\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-ptww2" Sep 30 12:22:05 crc kubenswrapper[5002]: I0930 12:22:05.457412 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/146ae66a-f672-4c96-b523-6352170084fc-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-ptww2\" (UID: \"146ae66a-f672-4c96-b523-6352170084fc\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-ptww2" Sep 30 12:22:05 crc kubenswrapper[5002]: I0930 12:22:05.457437 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/146ae66a-f672-4c96-b523-6352170084fc-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-ptww2\" (UID: \"146ae66a-f672-4c96-b523-6352170084fc\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-ptww2" Sep 30 12:22:05 crc kubenswrapper[5002]: I0930 12:22:05.457451 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/146ae66a-f672-4c96-b523-6352170084fc-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-ptww2\" (UID: \"146ae66a-f672-4c96-b523-6352170084fc\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-ptww2" Sep 30 12:22:05 crc kubenswrapper[5002]: I0930 12:22:05.457481 5002 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/146ae66a-f672-4c96-b523-6352170084fc-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-ptww2\" (UID: \"146ae66a-f672-4c96-b523-6352170084fc\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-ptww2" Sep 30 12:22:05 crc kubenswrapper[5002]: I0930 12:22:05.457722 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/146ae66a-f672-4c96-b523-6352170084fc-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-ptww2\" (UID: \"146ae66a-f672-4c96-b523-6352170084fc\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-ptww2" Sep 30 12:22:05 crc kubenswrapper[5002]: I0930 12:22:05.458284 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/146ae66a-f672-4c96-b523-6352170084fc-service-ca\") pod \"cluster-version-operator-5c965bbfc6-ptww2\" (UID: \"146ae66a-f672-4c96-b523-6352170084fc\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-ptww2" Sep 30 12:22:05 crc kubenswrapper[5002]: I0930 12:22:05.464107 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/146ae66a-f672-4c96-b523-6352170084fc-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-ptww2\" (UID: \"146ae66a-f672-4c96-b523-6352170084fc\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-ptww2" Sep 30 12:22:05 crc kubenswrapper[5002]: I0930 12:22:05.478080 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/146ae66a-f672-4c96-b523-6352170084fc-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-ptww2\" (UID: \"146ae66a-f672-4c96-b523-6352170084fc\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-ptww2" Sep 30 12:22:05 crc kubenswrapper[5002]: I0930 12:22:05.483020 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podStartSLOduration=74.48284961 podStartE2EDuration="1m14.48284961s" podCreationTimestamp="2025-09-30 12:20:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 12:22:05.469717992 +0000 UTC m=+99.719400138" watchObservedRunningTime="2025-09-30 12:22:05.48284961 +0000 UTC m=+99.732531756" Sep 30 12:22:05 crc kubenswrapper[5002]: I0930 12:22:05.493714 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/node-ca-ls6n9" podStartSLOduration=76.493693204 podStartE2EDuration="1m16.493693204s" podCreationTimestamp="2025-09-30 12:20:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 12:22:05.483335734 +0000 UTC m=+99.733017880" watchObservedRunningTime="2025-09-30 12:22:05.493693204 +0000 UTC m=+99.743375350" Sep 30 12:22:05 crc kubenswrapper[5002]: I0930 12:22:05.494363 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" podStartSLOduration=76.494356074 podStartE2EDuration="1m16.494356074s" podCreationTimestamp="2025-09-30 12:20:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 
UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 12:22:05.494091985 +0000 UTC m=+99.743774131" watchObservedRunningTime="2025-09-30 12:22:05.494356074 +0000 UTC m=+99.744038220" Sep 30 12:22:05 crc kubenswrapper[5002]: I0930 12:22:05.569101 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-ttfn8" podStartSLOduration=76.569081549 podStartE2EDuration="1m16.569081549s" podCreationTimestamp="2025-09-30 12:20:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 12:22:05.568899364 +0000 UTC m=+99.818581560" watchObservedRunningTime="2025-09-30 12:22:05.569081549 +0000 UTC m=+99.818763695" Sep 30 12:22:05 crc kubenswrapper[5002]: I0930 12:22:05.579085 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/node-resolver-p45pn" podStartSLOduration=76.579069649 podStartE2EDuration="1m16.579069649s" podCreationTimestamp="2025-09-30 12:20:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 12:22:05.578690999 +0000 UTC m=+99.828373155" watchObservedRunningTime="2025-09-30 12:22:05.579069649 +0000 UTC m=+99.828751795" Sep 30 12:22:05 crc kubenswrapper[5002]: I0930 12:22:05.594005 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-additional-cni-plugins-2lqq2" podStartSLOduration=76.593991108 podStartE2EDuration="1m16.593991108s" podCreationTimestamp="2025-09-30 12:20:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 12:22:05.59336259 +0000 UTC m=+99.843044766" watchObservedRunningTime="2025-09-30 12:22:05.593991108 +0000 UTC m=+99.843673254" Sep 30 12:22:05 crc kubenswrapper[5002]: I0930 12:22:05.615199 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" podStartSLOduration=32.615174622 podStartE2EDuration="32.615174622s" podCreationTimestamp="2025-09-30 12:21:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 12:22:05.61471666 +0000 UTC m=+99.864398806" watchObservedRunningTime="2025-09-30 12:22:05.615174622 +0000 UTC m=+99.864856808" Sep 30 12:22:05 crc kubenswrapper[5002]: I0930 12:22:05.625669 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" podStartSLOduration=52.625651156 podStartE2EDuration="52.625651156s" podCreationTimestamp="2025-09-30 12:21:13 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 12:22:05.625203893 +0000 UTC m=+99.874886049" watchObservedRunningTime="2025-09-30 12:22:05.625651156 +0000 UTC m=+99.875333302" Sep 30 12:22:05 crc kubenswrapper[5002]: I0930 12:22:05.630505 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-ptww2" Sep 30 12:22:05 crc kubenswrapper[5002]: I0930 12:22:05.675206 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-dj2ln" Sep 30 12:22:05 crc kubenswrapper[5002]: E0930 12:22:05.675329 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-dj2ln" podUID="68756e8a-d882-403f-acd7-2c41fce4446f" Sep 30 12:22:06 crc kubenswrapper[5002]: I0930 12:22:06.388243 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-ptww2" event={"ID":"146ae66a-f672-4c96-b523-6352170084fc","Type":"ContainerStarted","Data":"842ded516ecfa56584b8b5ffb1c54f6d1a3bf068f8c6361faaf5841ba3ff032a"} Sep 30 12:22:06 crc kubenswrapper[5002]: I0930 12:22:06.388341 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-ptww2" event={"ID":"146ae66a-f672-4c96-b523-6352170084fc","Type":"ContainerStarted","Data":"fffe986199c99b3c365daf0d8fa7052d9605d742ae7c1442d40d484df1828ae0"} Sep 30 12:22:06 crc kubenswrapper[5002]: I0930 12:22:06.675784 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 12:22:06 crc kubenswrapper[5002]: I0930 12:22:06.676046 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 12:22:06 crc kubenswrapper[5002]: E0930 12:22:06.676046 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 12:22:06 crc kubenswrapper[5002]: I0930 12:22:06.676069 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 12:22:06 crc kubenswrapper[5002]: E0930 12:22:06.677719 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 12:22:06 crc kubenswrapper[5002]: E0930 12:22:06.677916 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 12:22:07 crc kubenswrapper[5002]: I0930 12:22:07.675634 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-dj2ln" Sep 30 12:22:07 crc kubenswrapper[5002]: E0930 12:22:07.675934 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-dj2ln" podUID="68756e8a-d882-403f-acd7-2c41fce4446f" Sep 30 12:22:08 crc kubenswrapper[5002]: I0930 12:22:08.408915 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/68756e8a-d882-403f-acd7-2c41fce4446f-metrics-certs\") pod \"network-metrics-daemon-dj2ln\" (UID: \"68756e8a-d882-403f-acd7-2c41fce4446f\") " pod="openshift-multus/network-metrics-daemon-dj2ln" Sep 30 12:22:08 crc kubenswrapper[5002]: E0930 12:22:08.409134 5002 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Sep 30 12:22:08 crc kubenswrapper[5002]: E0930 12:22:08.409214 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/68756e8a-d882-403f-acd7-2c41fce4446f-metrics-certs podName:68756e8a-d882-403f-acd7-2c41fce4446f nodeName:}" failed. No retries permitted until 2025-09-30 12:23:12.409194537 +0000 UTC m=+166.658876683 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/68756e8a-d882-403f-acd7-2c41fce4446f-metrics-certs") pod "network-metrics-daemon-dj2ln" (UID: "68756e8a-d882-403f-acd7-2c41fce4446f") : object "openshift-multus"/"metrics-daemon-secret" not registered Sep 30 12:22:08 crc kubenswrapper[5002]: I0930 12:22:08.675301 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 12:22:08 crc kubenswrapper[5002]: I0930 12:22:08.675364 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 12:22:08 crc kubenswrapper[5002]: E0930 12:22:08.675438 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 12:22:08 crc kubenswrapper[5002]: I0930 12:22:08.675588 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 12:22:08 crc kubenswrapper[5002]: E0930 12:22:08.675622 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 12:22:08 crc kubenswrapper[5002]: E0930 12:22:08.675836 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 12:22:09 crc kubenswrapper[5002]: I0930 12:22:09.675595 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-dj2ln" Sep 30 12:22:09 crc kubenswrapper[5002]: E0930 12:22:09.675833 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-dj2ln" podUID="68756e8a-d882-403f-acd7-2c41fce4446f" Sep 30 12:22:09 crc kubenswrapper[5002]: I0930 12:22:09.676684 5002 scope.go:117] "RemoveContainer" containerID="9a9a8ff35878c8516cf03a6425a099adbb9e94d1feae510064c59f89b051fcb5" Sep 30 12:22:09 crc kubenswrapper[5002]: E0930 12:22:09.676875 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-4pvsr_openshift-ovn-kubernetes(7095aa7a-d067-4977-bdc5-3a45a52a6a39)\"" pod="openshift-ovn-kubernetes/ovnkube-node-4pvsr" podUID="7095aa7a-d067-4977-bdc5-3a45a52a6a39" Sep 30 12:22:10 crc kubenswrapper[5002]: I0930 12:22:10.675170 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 12:22:10 crc kubenswrapper[5002]: I0930 12:22:10.675213 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 12:22:10 crc kubenswrapper[5002]: E0930 12:22:10.675951 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 12:22:10 crc kubenswrapper[5002]: I0930 12:22:10.675386 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 12:22:10 crc kubenswrapper[5002]: E0930 12:22:10.676072 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 12:22:10 crc kubenswrapper[5002]: E0930 12:22:10.677054 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 12:22:11 crc kubenswrapper[5002]: I0930 12:22:11.675369 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-dj2ln" Sep 30 12:22:11 crc kubenswrapper[5002]: E0930 12:22:11.675584 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-dj2ln" podUID="68756e8a-d882-403f-acd7-2c41fce4446f" Sep 30 12:22:12 crc kubenswrapper[5002]: I0930 12:22:12.676165 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 12:22:12 crc kubenswrapper[5002]: I0930 12:22:12.676301 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 12:22:12 crc kubenswrapper[5002]: I0930 12:22:12.676521 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 12:22:12 crc kubenswrapper[5002]: E0930 12:22:12.676657 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 12:22:12 crc kubenswrapper[5002]: E0930 12:22:12.676793 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 12:22:12 crc kubenswrapper[5002]: E0930 12:22:12.676861 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 12:22:13 crc kubenswrapper[5002]: I0930 12:22:13.676003 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-dj2ln" Sep 30 12:22:13 crc kubenswrapper[5002]: E0930 12:22:13.676187 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-dj2ln" podUID="68756e8a-d882-403f-acd7-2c41fce4446f" Sep 30 12:22:14 crc kubenswrapper[5002]: I0930 12:22:14.675302 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 12:22:14 crc kubenswrapper[5002]: E0930 12:22:14.675520 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 12:22:14 crc kubenswrapper[5002]: I0930 12:22:14.675633 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 12:22:14 crc kubenswrapper[5002]: I0930 12:22:14.675713 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 12:22:14 crc kubenswrapper[5002]: E0930 12:22:14.675860 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 12:22:14 crc kubenswrapper[5002]: E0930 12:22:14.676441 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 12:22:15 crc kubenswrapper[5002]: I0930 12:22:15.675274 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-dj2ln" Sep 30 12:22:15 crc kubenswrapper[5002]: E0930 12:22:15.675460 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-dj2ln" podUID="68756e8a-d882-403f-acd7-2c41fce4446f" Sep 30 12:22:16 crc kubenswrapper[5002]: I0930 12:22:16.675188 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 12:22:16 crc kubenswrapper[5002]: I0930 12:22:16.675227 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 12:22:16 crc kubenswrapper[5002]: I0930 12:22:16.675266 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 12:22:16 crc kubenswrapper[5002]: E0930 12:22:16.679714 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 12:22:16 crc kubenswrapper[5002]: E0930 12:22:16.680132 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 12:22:16 crc kubenswrapper[5002]: E0930 12:22:16.680437 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 12:22:17 crc kubenswrapper[5002]: I0930 12:22:17.675501 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-dj2ln" Sep 30 12:22:17 crc kubenswrapper[5002]: E0930 12:22:17.675653 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-dj2ln" podUID="68756e8a-d882-403f-acd7-2c41fce4446f" Sep 30 12:22:18 crc kubenswrapper[5002]: I0930 12:22:18.675578 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 12:22:18 crc kubenswrapper[5002]: I0930 12:22:18.675685 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 12:22:18 crc kubenswrapper[5002]: I0930 12:22:18.675780 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 12:22:18 crc kubenswrapper[5002]: E0930 12:22:18.676303 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 12:22:18 crc kubenswrapper[5002]: E0930 12:22:18.676446 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 12:22:18 crc kubenswrapper[5002]: E0930 12:22:18.676651 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 12:22:19 crc kubenswrapper[5002]: I0930 12:22:19.675023 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-dj2ln" Sep 30 12:22:19 crc kubenswrapper[5002]: E0930 12:22:19.675415 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-dj2ln" podUID="68756e8a-d882-403f-acd7-2c41fce4446f" Sep 30 12:22:20 crc kubenswrapper[5002]: I0930 12:22:20.676000 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 12:22:20 crc kubenswrapper[5002]: I0930 12:22:20.676042 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 12:22:20 crc kubenswrapper[5002]: E0930 12:22:20.676227 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 12:22:20 crc kubenswrapper[5002]: I0930 12:22:20.676288 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 12:22:20 crc kubenswrapper[5002]: E0930 12:22:20.676431 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 12:22:20 crc kubenswrapper[5002]: E0930 12:22:20.676655 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 12:22:21 crc kubenswrapper[5002]: I0930 12:22:21.675130 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-dj2ln" Sep 30 12:22:21 crc kubenswrapper[5002]: E0930 12:22:21.675365 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-dj2ln" podUID="68756e8a-d882-403f-acd7-2c41fce4446f" Sep 30 12:22:22 crc kubenswrapper[5002]: I0930 12:22:22.675819 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 12:22:22 crc kubenswrapper[5002]: I0930 12:22:22.675859 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 12:22:22 crc kubenswrapper[5002]: I0930 12:22:22.675851 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 12:22:22 crc kubenswrapper[5002]: E0930 12:22:22.676031 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 12:22:22 crc kubenswrapper[5002]: E0930 12:22:22.676626 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 12:22:22 crc kubenswrapper[5002]: E0930 12:22:22.676825 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 12:22:22 crc kubenswrapper[5002]: I0930 12:22:22.677263 5002 scope.go:117] "RemoveContainer" containerID="9a9a8ff35878c8516cf03a6425a099adbb9e94d1feae510064c59f89b051fcb5" Sep 30 12:22:22 crc kubenswrapper[5002]: E0930 12:22:22.677614 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-4pvsr_openshift-ovn-kubernetes(7095aa7a-d067-4977-bdc5-3a45a52a6a39)\"" pod="openshift-ovn-kubernetes/ovnkube-node-4pvsr" podUID="7095aa7a-d067-4977-bdc5-3a45a52a6a39" Sep 30 12:22:23 crc kubenswrapper[5002]: I0930 12:22:23.675414 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-dj2ln" Sep 30 12:22:23 crc kubenswrapper[5002]: E0930 12:22:23.675574 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-dj2ln" podUID="68756e8a-d882-403f-acd7-2c41fce4446f" Sep 30 12:22:24 crc kubenswrapper[5002]: I0930 12:22:24.455532 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-ttfn8_2bd9f18d-bfb3-4bd7-9a87-242029cd3200/kube-multus/1.log" Sep 30 12:22:24 crc kubenswrapper[5002]: I0930 12:22:24.456403 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-ttfn8_2bd9f18d-bfb3-4bd7-9a87-242029cd3200/kube-multus/0.log" Sep 30 12:22:24 crc kubenswrapper[5002]: I0930 12:22:24.456513 5002 generic.go:334] "Generic (PLEG): container finished" podID="2bd9f18d-bfb3-4bd7-9a87-242029cd3200" containerID="4cf7d940b7333c33f9092bf1a630d298ae51fee0947c17ef09d902ae5dc1103c" exitCode=1 Sep 30 12:22:24 crc kubenswrapper[5002]: I0930 12:22:24.456560 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-ttfn8" event={"ID":"2bd9f18d-bfb3-4bd7-9a87-242029cd3200","Type":"ContainerDied","Data":"4cf7d940b7333c33f9092bf1a630d298ae51fee0947c17ef09d902ae5dc1103c"} Sep 30 12:22:24 crc kubenswrapper[5002]: I0930 12:22:24.456608 5002 scope.go:117] "RemoveContainer" containerID="0809a587f4b73c4a5e632e8db2cc0d4a957eded6da9a771cf0b53e4862de54e5" Sep 30 12:22:24 crc kubenswrapper[5002]: I0930 12:22:24.458830 5002 scope.go:117] "RemoveContainer" containerID="4cf7d940b7333c33f9092bf1a630d298ae51fee0947c17ef09d902ae5dc1103c" Sep 30 12:22:24 crc kubenswrapper[5002]: E0930 12:22:24.459349 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-multus pod=multus-ttfn8_openshift-multus(2bd9f18d-bfb3-4bd7-9a87-242029cd3200)\"" pod="openshift-multus/multus-ttfn8" podUID="2bd9f18d-bfb3-4bd7-9a87-242029cd3200" Sep 30 12:22:24 crc kubenswrapper[5002]: I0930 12:22:24.485800 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-ptww2" podStartSLOduration=95.485774026 podStartE2EDuration="1m35.485774026s" podCreationTimestamp="2025-09-30 12:20:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 12:22:06.411100569 +0000 UTC m=+100.660782755" watchObservedRunningTime="2025-09-30 12:22:24.485774026 +0000 UTC m=+118.735456202" Sep 30 12:22:24 crc kubenswrapper[5002]: I0930 12:22:24.675827 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 12:22:24 crc kubenswrapper[5002]: I0930 12:22:24.675897 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 12:22:24 crc kubenswrapper[5002]: E0930 12:22:24.676031 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 12:22:24 crc kubenswrapper[5002]: I0930 12:22:24.676368 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 12:22:24 crc kubenswrapper[5002]: E0930 12:22:24.676550 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 12:22:24 crc kubenswrapper[5002]: E0930 12:22:24.676985 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 12:22:25 crc kubenswrapper[5002]: I0930 12:22:25.462566 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-ttfn8_2bd9f18d-bfb3-4bd7-9a87-242029cd3200/kube-multus/1.log" Sep 30 12:22:25 crc kubenswrapper[5002]: I0930 12:22:25.675584 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-dj2ln" Sep 30 12:22:25 crc kubenswrapper[5002]: E0930 12:22:25.675743 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-dj2ln" podUID="68756e8a-d882-403f-acd7-2c41fce4446f" Sep 30 12:22:26 crc kubenswrapper[5002]: E0930 12:22:26.616622 5002 kubelet_node_status.go:497] "Node not becoming ready in time after startup" Sep 30 12:22:26 crc kubenswrapper[5002]: I0930 12:22:26.675702 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 12:22:26 crc kubenswrapper[5002]: I0930 12:22:26.675735 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 12:22:26 crc kubenswrapper[5002]: E0930 12:22:26.677653 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 12:22:26 crc kubenswrapper[5002]: I0930 12:22:26.677679 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 12:22:26 crc kubenswrapper[5002]: E0930 12:22:26.677794 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 12:22:26 crc kubenswrapper[5002]: E0930 12:22:26.677900 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 12:22:26 crc kubenswrapper[5002]: E0930 12:22:26.760220 5002 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Sep 30 12:22:27 crc kubenswrapper[5002]: I0930 12:22:27.675517 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-dj2ln" Sep 30 12:22:27 crc kubenswrapper[5002]: E0930 12:22:27.675670 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-dj2ln" podUID="68756e8a-d882-403f-acd7-2c41fce4446f" Sep 30 12:22:28 crc kubenswrapper[5002]: I0930 12:22:28.675779 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 12:22:28 crc kubenswrapper[5002]: E0930 12:22:28.675955 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 12:22:28 crc kubenswrapper[5002]: I0930 12:22:28.676235 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 12:22:28 crc kubenswrapper[5002]: E0930 12:22:28.676325 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 12:22:28 crc kubenswrapper[5002]: I0930 12:22:28.676748 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 12:22:28 crc kubenswrapper[5002]: E0930 12:22:28.676870 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 12:22:29 crc kubenswrapper[5002]: I0930 12:22:29.675305 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-dj2ln" Sep 30 12:22:29 crc kubenswrapper[5002]: E0930 12:22:29.675592 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-dj2ln" podUID="68756e8a-d882-403f-acd7-2c41fce4446f" Sep 30 12:22:30 crc kubenswrapper[5002]: I0930 12:22:30.675848 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 12:22:30 crc kubenswrapper[5002]: I0930 12:22:30.675963 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 12:22:30 crc kubenswrapper[5002]: E0930 12:22:30.676106 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 12:22:30 crc kubenswrapper[5002]: I0930 12:22:30.676148 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 12:22:30 crc kubenswrapper[5002]: E0930 12:22:30.676276 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 12:22:30 crc kubenswrapper[5002]: E0930 12:22:30.676422 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 12:22:31 crc kubenswrapper[5002]: I0930 12:22:31.675568 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-dj2ln" Sep 30 12:22:31 crc kubenswrapper[5002]: E0930 12:22:31.675775 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-dj2ln" podUID="68756e8a-d882-403f-acd7-2c41fce4446f" Sep 30 12:22:31 crc kubenswrapper[5002]: E0930 12:22:31.762232 5002 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Sep 30 12:22:32 crc kubenswrapper[5002]: I0930 12:22:32.679364 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 12:22:32 crc kubenswrapper[5002]: E0930 12:22:32.679582 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 12:22:32 crc kubenswrapper[5002]: I0930 12:22:32.679946 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 12:22:32 crc kubenswrapper[5002]: E0930 12:22:32.680043 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 12:22:32 crc kubenswrapper[5002]: I0930 12:22:32.680264 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 12:22:32 crc kubenswrapper[5002]: E0930 12:22:32.680398 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 12:22:33 crc kubenswrapper[5002]: I0930 12:22:33.675361 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-dj2ln" Sep 30 12:22:33 crc kubenswrapper[5002]: E0930 12:22:33.676603 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-dj2ln" podUID="68756e8a-d882-403f-acd7-2c41fce4446f" Sep 30 12:22:34 crc kubenswrapper[5002]: I0930 12:22:34.675627 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 12:22:34 crc kubenswrapper[5002]: I0930 12:22:34.675783 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 12:22:34 crc kubenswrapper[5002]: I0930 12:22:34.675856 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 12:22:34 crc kubenswrapper[5002]: E0930 12:22:34.676043 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 12:22:34 crc kubenswrapper[5002]: E0930 12:22:34.676215 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 12:22:34 crc kubenswrapper[5002]: E0930 12:22:34.676368 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 12:22:35 crc kubenswrapper[5002]: I0930 12:22:35.675675 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-dj2ln" Sep 30 12:22:35 crc kubenswrapper[5002]: E0930 12:22:35.675853 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-dj2ln" podUID="68756e8a-d882-403f-acd7-2c41fce4446f" Sep 30 12:22:36 crc kubenswrapper[5002]: I0930 12:22:36.675796 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 12:22:36 crc kubenswrapper[5002]: I0930 12:22:36.675915 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 12:22:36 crc kubenswrapper[5002]: E0930 12:22:36.678029 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 12:22:36 crc kubenswrapper[5002]: I0930 12:22:36.678171 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 12:22:36 crc kubenswrapper[5002]: E0930 12:22:36.678689 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 12:22:36 crc kubenswrapper[5002]: E0930 12:22:36.678796 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 12:22:36 crc kubenswrapper[5002]: I0930 12:22:36.679183 5002 scope.go:117] "RemoveContainer" containerID="9a9a8ff35878c8516cf03a6425a099adbb9e94d1feae510064c59f89b051fcb5" Sep 30 12:22:36 crc kubenswrapper[5002]: E0930 12:22:36.763239 5002 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Sep 30 12:22:37 crc kubenswrapper[5002]: I0930 12:22:37.507224 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-4pvsr_7095aa7a-d067-4977-bdc5-3a45a52a6a39/ovnkube-controller/3.log" Sep 30 12:22:37 crc kubenswrapper[5002]: I0930 12:22:37.510599 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4pvsr" event={"ID":"7095aa7a-d067-4977-bdc5-3a45a52a6a39","Type":"ContainerStarted","Data":"f2b1316880efb368c068d7979dd622f56942ad0268acb39147dcd8131884428f"} Sep 30 12:22:37 crc kubenswrapper[5002]: I0930 12:22:37.511761 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-4pvsr" Sep 30 12:22:37 crc kubenswrapper[5002]: I0930 12:22:37.548276 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-4pvsr" podStartSLOduration=107.54825118 podStartE2EDuration="1m47.54825118s" podCreationTimestamp="2025-09-30 12:20:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 12:22:37.547506678 +0000 UTC m=+131.797188834" watchObservedRunningTime="2025-09-30 12:22:37.54825118 +0000 UTC m=+131.797933356" Sep 30 12:22:37 crc kubenswrapper[5002]: I0930 12:22:37.566781 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-dj2ln"] Sep 30 12:22:37 crc kubenswrapper[5002]: I0930 12:22:37.566973 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-dj2ln" Sep 30 12:22:37 crc kubenswrapper[5002]: E0930 12:22:37.567155 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-multus/network-metrics-daemon-dj2ln" podUID="68756e8a-d882-403f-acd7-2c41fce4446f" Sep 30 12:22:38 crc kubenswrapper[5002]: I0930 12:22:38.675310 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 12:22:38 crc kubenswrapper[5002]: I0930 12:22:38.675406 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 12:22:38 crc kubenswrapper[5002]: E0930 12:22:38.675459 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 12:22:38 crc kubenswrapper[5002]: I0930 12:22:38.675553 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-dj2ln" Sep 30 12:22:38 crc kubenswrapper[5002]: I0930 12:22:38.675595 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 12:22:38 crc kubenswrapper[5002]: E0930 12:22:38.675775 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 12:22:38 crc kubenswrapper[5002]: E0930 12:22:38.675839 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-dj2ln" podUID="68756e8a-d882-403f-acd7-2c41fce4446f" Sep 30 12:22:38 crc kubenswrapper[5002]: E0930 12:22:38.676000 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 12:22:38 crc kubenswrapper[5002]: I0930 12:22:38.676566 5002 scope.go:117] "RemoveContainer" containerID="4cf7d940b7333c33f9092bf1a630d298ae51fee0947c17ef09d902ae5dc1103c" Sep 30 12:22:39 crc kubenswrapper[5002]: I0930 12:22:39.520635 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-ttfn8_2bd9f18d-bfb3-4bd7-9a87-242029cd3200/kube-multus/1.log" Sep 30 12:22:39 crc kubenswrapper[5002]: I0930 12:22:39.521170 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-ttfn8" event={"ID":"2bd9f18d-bfb3-4bd7-9a87-242029cd3200","Type":"ContainerStarted","Data":"c5f7e86f7e2139fe8fdc7a6d09f418a223f9da04a6950e5dec018265268c7de5"} Sep 30 12:22:40 crc kubenswrapper[5002]: I0930 12:22:40.675500 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 12:22:40 crc kubenswrapper[5002]: I0930 12:22:40.675529 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-dj2ln" Sep 30 12:22:40 crc kubenswrapper[5002]: I0930 12:22:40.675551 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 12:22:40 crc kubenswrapper[5002]: I0930 12:22:40.675613 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 12:22:40 crc kubenswrapper[5002]: E0930 12:22:40.675661 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 12:22:40 crc kubenswrapper[5002]: E0930 12:22:40.676023 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 12:22:40 crc kubenswrapper[5002]: E0930 12:22:40.676222 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 12:22:40 crc kubenswrapper[5002]: E0930 12:22:40.676388 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-dj2ln" podUID="68756e8a-d882-403f-acd7-2c41fce4446f" Sep 30 12:22:42 crc kubenswrapper[5002]: I0930 12:22:42.676139 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-dj2ln" Sep 30 12:22:42 crc kubenswrapper[5002]: I0930 12:22:42.676231 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 12:22:42 crc kubenswrapper[5002]: I0930 12:22:42.676170 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 12:22:42 crc kubenswrapper[5002]: I0930 12:22:42.676368 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 12:22:42 crc kubenswrapper[5002]: I0930 12:22:42.679504 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert" Sep 30 12:22:42 crc kubenswrapper[5002]: I0930 12:22:42.679726 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt" Sep 30 12:22:42 crc kubenswrapper[5002]: I0930 12:22:42.679823 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret" Sep 30 12:22:42 crc kubenswrapper[5002]: I0930 12:22:42.680408 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt" Sep 30 12:22:42 crc kubenswrapper[5002]: I0930 12:22:42.680750 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c" Sep 30 12:22:42 crc kubenswrapper[5002]: I0930 12:22:42.682571 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin" Sep 30 12:22:45 crc kubenswrapper[5002]: I0930 12:22:45.948127 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeReady" Sep 30 12:22:45 crc kubenswrapper[5002]: I0930 12:22:45.993964 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-z66g4"] Sep 30 12:22:45 crc kubenswrapper[5002]: I0930 12:22:45.995057 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-z66g4" Sep 30 12:22:45 crc kubenswrapper[5002]: I0930 12:22:45.998830 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client" Sep 30 12:22:45 crc kubenswrapper[5002]: I0930 12:22:45.999872 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.006446 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-p582f"] Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.007114 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.007160 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-p582f" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.007736 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.008589 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.010887 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-fp74c"] Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.011592 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-fp74c" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.014592 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.017923 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.018166 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.018415 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.018807 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.019045 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.020272 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.020685 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.021045 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.021880 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.021967 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-pzv7v"] Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.022178 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.022217 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.022507 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-pzv7v" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.022528 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.022798 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.022973 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.023019 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.023442 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.023778 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-57t2m"] Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.024248 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-57t2m" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.027335 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-machine-approver/machine-approver-56656f9798-h7jgw"] Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.027885 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-trnhx"] Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.028304 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-h7jgw" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.028679 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-trnhx" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.041184 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.042181 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.044451 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-xnv2t"] Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.060803 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/f4580810-3604-47b5-8a68-d337b0890b48-node-pullsecrets\") pod \"apiserver-76f77b778f-z66g4\" (UID: \"f4580810-3604-47b5-8a68-d337b0890b48\") " pod="openshift-apiserver/apiserver-76f77b778f-z66g4" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.060839 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/11cba069-c2a4-4b6c-8e3e-38c76d27f20a-config\") pod \"controller-manager-879f6c89f-p582f\" (UID: \"11cba069-c2a4-4b6c-8e3e-38c76d27f20a\") " pod="openshift-controller-manager/controller-manager-879f6c89f-p582f" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.060855 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/f4580810-3604-47b5-8a68-d337b0890b48-etcd-serving-ca\") pod \"apiserver-76f77b778f-z66g4\" (UID: \"f4580810-3604-47b5-8a68-d337b0890b48\") " pod="openshift-apiserver/apiserver-76f77b778f-z66g4" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.060872 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4c29m\" (UniqueName: \"kubernetes.io/projected/11cba069-c2a4-4b6c-8e3e-38c76d27f20a-kube-api-access-4c29m\") pod \"controller-manager-879f6c89f-p582f\" (UID: \"11cba069-c2a4-4b6c-8e3e-38c76d27f20a\") " pod="openshift-controller-manager/controller-manager-879f6c89f-p582f" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.060888 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/f4580810-3604-47b5-8a68-d337b0890b48-encryption-config\") pod \"apiserver-76f77b778f-z66g4\" (UID: \"f4580810-3604-47b5-8a68-d337b0890b48\") " pod="openshift-apiserver/apiserver-76f77b778f-z66g4" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.060904 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f4580810-3604-47b5-8a68-d337b0890b48-config\") pod \"apiserver-76f77b778f-z66g4\" (UID: \"f4580810-3604-47b5-8a68-d337b0890b48\") " pod="openshift-apiserver/apiserver-76f77b778f-z66g4" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.060928 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/f4580810-3604-47b5-8a68-d337b0890b48-image-import-ca\") pod \"apiserver-76f77b778f-z66g4\" (UID: \"f4580810-3604-47b5-8a68-d337b0890b48\") " 
pod="openshift-apiserver/apiserver-76f77b778f-z66g4" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.060943 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wwtln\" (UniqueName: \"kubernetes.io/projected/f4580810-3604-47b5-8a68-d337b0890b48-kube-api-access-wwtln\") pod \"apiserver-76f77b778f-z66g4\" (UID: \"f4580810-3604-47b5-8a68-d337b0890b48\") " pod="openshift-apiserver/apiserver-76f77b778f-z66g4" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.060958 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/11cba069-c2a4-4b6c-8e3e-38c76d27f20a-client-ca\") pod \"controller-manager-879f6c89f-p582f\" (UID: \"11cba069-c2a4-4b6c-8e3e-38c76d27f20a\") " pod="openshift-controller-manager/controller-manager-879f6c89f-p582f" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.060971 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/11cba069-c2a4-4b6c-8e3e-38c76d27f20a-serving-cert\") pod \"controller-manager-879f6c89f-p582f\" (UID: \"11cba069-c2a4-4b6c-8e3e-38c76d27f20a\") " pod="openshift-controller-manager/controller-manager-879f6c89f-p582f" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.060984 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/11cba069-c2a4-4b6c-8e3e-38c76d27f20a-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-p582f\" (UID: \"11cba069-c2a4-4b6c-8e3e-38c76d27f20a\") " pod="openshift-controller-manager/controller-manager-879f6c89f-p582f" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.061001 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/f4580810-3604-47b5-8a68-d337b0890b48-audit\") pod \"apiserver-76f77b778f-z66g4\" (UID: \"f4580810-3604-47b5-8a68-d337b0890b48\") " pod="openshift-apiserver/apiserver-76f77b778f-z66g4" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.061030 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/f4580810-3604-47b5-8a68-d337b0890b48-trusted-ca-bundle\") pod \"apiserver-76f77b778f-z66g4\" (UID: \"f4580810-3604-47b5-8a68-d337b0890b48\") " pod="openshift-apiserver/apiserver-76f77b778f-z66g4" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.061049 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4580810-3604-47b5-8a68-d337b0890b48-audit-dir\") pod \"apiserver-76f77b778f-z66g4\" (UID: \"f4580810-3604-47b5-8a68-d337b0890b48\") " pod="openshift-apiserver/apiserver-76f77b778f-z66g4" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.061068 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/f4580810-3604-47b5-8a68-d337b0890b48-etcd-client\") pod \"apiserver-76f77b778f-z66g4\" (UID: \"f4580810-3604-47b5-8a68-d337b0890b48\") " pod="openshift-apiserver/apiserver-76f77b778f-z66g4" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.061080 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f4580810-3604-47b5-8a68-d337b0890b48-serving-cert\") pod \"apiserver-76f77b778f-z66g4\" (UID: \"f4580810-3604-47b5-8a68-d337b0890b48\") " pod="openshift-apiserver/apiserver-76f77b778f-z66g4" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.064051 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-hns42"] Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.064465 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-hns42" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.064889 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-xnv2t" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.065904 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.066070 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.066195 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.066310 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.066448 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.066550 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/downloads-7954f5f757-tdxkp"] Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.066607 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.066780 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.066882 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.066958 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.067077 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.067093 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.067171 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.067211 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.067310 5002 
reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.067359 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-tdxkp" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.067380 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.067445 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.067592 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.073413 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-ngjm2"] Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.073837 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-ngjm2" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.074555 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console-operator/console-operator-58897d9998-8m9sz"] Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.075067 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-8m9sz" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.075513 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.076223 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.076388 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.076656 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.076795 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.076937 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-wk84c"] Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.077040 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.077118 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.077401 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.078062 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-wk84c" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.078212 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.078324 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.078427 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.078610 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.078815 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.078966 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.079063 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.079160 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.079290 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.079402 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.079577 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.080955 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.082786 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-f9d7485db-79sft"] Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.083340 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-f9d7485db-79sft" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.086059 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.086218 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.086359 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.086566 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.087219 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.089714 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.095386 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.095580 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.095668 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.095792 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.095909 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.095981 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.096043 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.096130 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.096240 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.096315 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-8kz2p"] Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.096362 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.096503 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.096653 5002 reflector.go:368] Caches 
populated for *v1.ConfigMap from object-"openshift-console"/"console-config" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.096762 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.096911 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.097030 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.097137 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.097251 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.097365 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.097409 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.097622 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.097723 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.097848 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.097916 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-zfcl9"] Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.098311 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-zfcl9" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.098405 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-8kz2p" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.098314 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-lv4mz"] Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.113422 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.117699 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-lv4mz" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.140775 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-v6kxk"] Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.148763 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-v6kxk" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.153887 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.154537 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.154782 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.154892 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.155012 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.155104 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.155424 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.155606 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.157322 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.157761 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.158071 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress/router-default-5444994796-6rw5f"] Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.158156 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.158555 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.160159 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.160166 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.161077 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-n5jzk"] Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.161180 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ingress/router-default-5444994796-6rw5f" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.161670 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/f4580810-3604-47b5-8a68-d337b0890b48-audit\") pod \"apiserver-76f77b778f-z66g4\" (UID: \"f4580810-3604-47b5-8a68-d337b0890b48\") " pod="openshift-apiserver/apiserver-76f77b778f-z66g4" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.161731 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/b9be5683-0681-4c81-a534-a5f997ccde65-encryption-config\") pod \"apiserver-7bbb656c7d-pzv7v\" (UID: \"b9be5683-0681-4c81-a534-a5f997ccde65\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-pzv7v" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.161761 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/f4580810-3604-47b5-8a68-d337b0890b48-trusted-ca-bundle\") pod \"apiserver-76f77b778f-z66g4\" (UID: \"f4580810-3604-47b5-8a68-d337b0890b48\") " pod="openshift-apiserver/apiserver-76f77b778f-z66g4" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.161788 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/b9be5683-0681-4c81-a534-a5f997ccde65-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-pzv7v\" (UID: \"b9be5683-0681-4c81-a534-a5f997ccde65\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-pzv7v" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.161815 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4580810-3604-47b5-8a68-d337b0890b48-audit-dir\") pod \"apiserver-76f77b778f-z66g4\" (UID: \"f4580810-3604-47b5-8a68-d337b0890b48\") " pod="openshift-apiserver/apiserver-76f77b778f-z66g4" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.161835 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/34704ef4-bb3a-4e40-ac9e-7543c634d17f-config\") pod \"console-operator-58897d9998-8m9sz\" (UID: \"34704ef4-bb3a-4e40-ac9e-7543c634d17f\") " pod="openshift-console-operator/console-operator-58897d9998-8m9sz" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.161854 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/b9be5683-0681-4c81-a534-a5f997ccde65-audit-policies\") pod \"apiserver-7bbb656c7d-pzv7v\" (UID: \"b9be5683-0681-4c81-a534-a5f997ccde65\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-pzv7v" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.161882 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/f4580810-3604-47b5-8a68-d337b0890b48-etcd-client\") pod \"apiserver-76f77b778f-z66g4\" (UID: \"f4580810-3604-47b5-8a68-d337b0890b48\") " pod="openshift-apiserver/apiserver-76f77b778f-z66g4" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.161905 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/f4580810-3604-47b5-8a68-d337b0890b48-serving-cert\") pod \"apiserver-76f77b778f-z66g4\" (UID: \"f4580810-3604-47b5-8a68-d337b0890b48\") " pod="openshift-apiserver/apiserver-76f77b778f-z66g4" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.161936 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d96zp\" (UniqueName: \"kubernetes.io/projected/34704ef4-bb3a-4e40-ac9e-7543c634d17f-kube-api-access-d96zp\") pod \"console-operator-58897d9998-8m9sz\" (UID: \"34704ef4-bb3a-4e40-ac9e-7543c634d17f\") " pod="openshift-console-operator/console-operator-58897d9998-8m9sz" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.161959 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7bb85\" (UniqueName: \"kubernetes.io/projected/8978cad0-f5e7-4cd8-a862-225087df083e-kube-api-access-7bb85\") pod \"machine-approver-56656f9798-h7jgw\" (UID: \"8978cad0-f5e7-4cd8-a862-225087df083e\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-h7jgw" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.161969 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-6w5w8"] Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.161982 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/8978cad0-f5e7-4cd8-a862-225087df083e-auth-proxy-config\") pod \"machine-approver-56656f9798-h7jgw\" (UID: \"8978cad0-f5e7-4cd8-a862-225087df083e\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-h7jgw" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.162003 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/caaf9351-4cc8-41c4-8ebc-80e0243f23ad-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-8kz2p\" (UID: \"caaf9351-4cc8-41c4-8ebc-80e0243f23ad\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-8kz2p" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.162044 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/962ecf1e-f7d6-46ca-9846-0f2c84d71b54-config\") pod \"openshift-apiserver-operator-796bbdcf4f-trnhx\" (UID: \"962ecf1e-f7d6-46ca-9846-0f2c84d71b54\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-trnhx" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.162066 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/caaf9351-4cc8-41c4-8ebc-80e0243f23ad-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-8kz2p\" (UID: \"caaf9351-4cc8-41c4-8ebc-80e0243f23ad\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-8kz2p" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.162093 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/f4580810-3604-47b5-8a68-d337b0890b48-node-pullsecrets\") pod \"apiserver-76f77b778f-z66g4\" (UID: \"f4580810-3604-47b5-8a68-d337b0890b48\") " 
pod="openshift-apiserver/apiserver-76f77b778f-z66g4" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.162115 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/11cba069-c2a4-4b6c-8e3e-38c76d27f20a-config\") pod \"controller-manager-879f6c89f-p582f\" (UID: \"11cba069-c2a4-4b6c-8e3e-38c76d27f20a\") " pod="openshift-controller-manager/controller-manager-879f6c89f-p582f" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.162135 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/f4580810-3604-47b5-8a68-d337b0890b48-etcd-serving-ca\") pod \"apiserver-76f77b778f-z66g4\" (UID: \"f4580810-3604-47b5-8a68-d337b0890b48\") " pod="openshift-apiserver/apiserver-76f77b778f-z66g4" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.162157 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/8978cad0-f5e7-4cd8-a862-225087df083e-machine-approver-tls\") pod \"machine-approver-56656f9798-h7jgw\" (UID: \"8978cad0-f5e7-4cd8-a862-225087df083e\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-h7jgw" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.162179 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ndmfz\" (UniqueName: \"kubernetes.io/projected/962ecf1e-f7d6-46ca-9846-0f2c84d71b54-kube-api-access-ndmfz\") pod \"openshift-apiserver-operator-796bbdcf4f-trnhx\" (UID: \"962ecf1e-f7d6-46ca-9846-0f2c84d71b54\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-trnhx" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.162204 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4c29m\" (UniqueName: \"kubernetes.io/projected/11cba069-c2a4-4b6c-8e3e-38c76d27f20a-kube-api-access-4c29m\") pod \"controller-manager-879f6c89f-p582f\" (UID: \"11cba069-c2a4-4b6c-8e3e-38c76d27f20a\") " pod="openshift-controller-manager/controller-manager-879f6c89f-p582f" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.162279 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-6w5w8" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.162611 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-n5jzk" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.164370 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.165644 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/f4580810-3604-47b5-8a68-d337b0890b48-encryption-config\") pod \"apiserver-76f77b778f-z66g4\" (UID: \"f4580810-3604-47b5-8a68-d337b0890b48\") " pod="openshift-apiserver/apiserver-76f77b778f-z66g4" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.165705 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f4580810-3604-47b5-8a68-d337b0890b48-config\") pod \"apiserver-76f77b778f-z66g4\" (UID: \"f4580810-3604-47b5-8a68-d337b0890b48\") " pod="openshift-apiserver/apiserver-76f77b778f-z66g4" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.165728 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/f4580810-3604-47b5-8a68-d337b0890b48-image-import-ca\") pod \"apiserver-76f77b778f-z66g4\" (UID: \"f4580810-3604-47b5-8a68-d337b0890b48\") " pod="openshift-apiserver/apiserver-76f77b778f-z66g4" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.165752 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/b9be5683-0681-4c81-a534-a5f997ccde65-etcd-client\") pod \"apiserver-7bbb656c7d-pzv7v\" (UID: \"b9be5683-0681-4c81-a534-a5f997ccde65\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-pzv7v" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.165784 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wwtln\" (UniqueName: \"kubernetes.io/projected/f4580810-3604-47b5-8a68-d337b0890b48-kube-api-access-wwtln\") pod \"apiserver-76f77b778f-z66g4\" (UID: \"f4580810-3604-47b5-8a68-d337b0890b48\") " pod="openshift-apiserver/apiserver-76f77b778f-z66g4" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.165812 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/34704ef4-bb3a-4e40-ac9e-7543c634d17f-serving-cert\") pod \"console-operator-58897d9998-8m9sz\" (UID: \"34704ef4-bb3a-4e40-ac9e-7543c634d17f\") " pod="openshift-console-operator/console-operator-58897d9998-8m9sz" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.165835 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b9be5683-0681-4c81-a534-a5f997ccde65-serving-cert\") pod \"apiserver-7bbb656c7d-pzv7v\" (UID: \"b9be5683-0681-4c81-a534-a5f997ccde65\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-pzv7v" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.165857 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/b9be5683-0681-4c81-a534-a5f997ccde65-audit-dir\") pod \"apiserver-7bbb656c7d-pzv7v\" (UID: \"b9be5683-0681-4c81-a534-a5f997ccde65\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-pzv7v" Sep 30 
12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.165896 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/34704ef4-bb3a-4e40-ac9e-7543c634d17f-trusted-ca\") pod \"console-operator-58897d9998-8m9sz\" (UID: \"34704ef4-bb3a-4e40-ac9e-7543c634d17f\") " pod="openshift-console-operator/console-operator-58897d9998-8m9sz" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.165921 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gxlls\" (UniqueName: \"kubernetes.io/projected/b9be5683-0681-4c81-a534-a5f997ccde65-kube-api-access-gxlls\") pod \"apiserver-7bbb656c7d-pzv7v\" (UID: \"b9be5683-0681-4c81-a534-a5f997ccde65\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-pzv7v" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.165950 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/11cba069-c2a4-4b6c-8e3e-38c76d27f20a-client-ca\") pod \"controller-manager-879f6c89f-p582f\" (UID: \"11cba069-c2a4-4b6c-8e3e-38c76d27f20a\") " pod="openshift-controller-manager/controller-manager-879f6c89f-p582f" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.165975 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/11cba069-c2a4-4b6c-8e3e-38c76d27f20a-serving-cert\") pod \"controller-manager-879f6c89f-p582f\" (UID: \"11cba069-c2a4-4b6c-8e3e-38c76d27f20a\") " pod="openshift-controller-manager/controller-manager-879f6c89f-p582f" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.166004 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/11cba069-c2a4-4b6c-8e3e-38c76d27f20a-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-p582f\" (UID: \"11cba069-c2a4-4b6c-8e3e-38c76d27f20a\") " pod="openshift-controller-manager/controller-manager-879f6c89f-p582f" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.166029 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gpgf4\" (UniqueName: \"kubernetes.io/projected/caaf9351-4cc8-41c4-8ebc-80e0243f23ad-kube-api-access-gpgf4\") pod \"openshift-controller-manager-operator-756b6f6bc6-8kz2p\" (UID: \"caaf9351-4cc8-41c4-8ebc-80e0243f23ad\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-8kz2p" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.166062 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b9be5683-0681-4c81-a534-a5f997ccde65-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-pzv7v\" (UID: \"b9be5683-0681-4c81-a534-a5f997ccde65\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-pzv7v" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.166084 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8978cad0-f5e7-4cd8-a862-225087df083e-config\") pod \"machine-approver-56656f9798-h7jgw\" (UID: \"8978cad0-f5e7-4cd8-a862-225087df083e\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-h7jgw" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.166115 5002 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/962ecf1e-f7d6-46ca-9846-0f2c84d71b54-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-trnhx\" (UID: \"962ecf1e-f7d6-46ca-9846-0f2c84d71b54\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-trnhx" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.166278 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/f4580810-3604-47b5-8a68-d337b0890b48-node-pullsecrets\") pod \"apiserver-76f77b778f-z66g4\" (UID: \"f4580810-3604-47b5-8a68-d337b0890b48\") " pod="openshift-apiserver/apiserver-76f77b778f-z66g4" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.166566 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/f4580810-3604-47b5-8a68-d337b0890b48-etcd-serving-ca\") pod \"apiserver-76f77b778f-z66g4\" (UID: \"f4580810-3604-47b5-8a68-d337b0890b48\") " pod="openshift-apiserver/apiserver-76f77b778f-z66g4" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.166802 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/f4580810-3604-47b5-8a68-d337b0890b48-audit\") pod \"apiserver-76f77b778f-z66g4\" (UID: \"f4580810-3604-47b5-8a68-d337b0890b48\") " pod="openshift-apiserver/apiserver-76f77b778f-z66g4" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.167689 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/f4580810-3604-47b5-8a68-d337b0890b48-trusted-ca-bundle\") pod \"apiserver-76f77b778f-z66g4\" (UID: \"f4580810-3604-47b5-8a68-d337b0890b48\") " pod="openshift-apiserver/apiserver-76f77b778f-z66g4" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.167735 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4580810-3604-47b5-8a68-d337b0890b48-audit-dir\") pod \"apiserver-76f77b778f-z66g4\" (UID: \"f4580810-3604-47b5-8a68-d337b0890b48\") " pod="openshift-apiserver/apiserver-76f77b778f-z66g4" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.167890 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/11cba069-c2a4-4b6c-8e3e-38c76d27f20a-config\") pod \"controller-manager-879f6c89f-p582f\" (UID: \"11cba069-c2a4-4b6c-8e3e-38c76d27f20a\") " pod="openshift-controller-manager/controller-manager-879f6c89f-p582f" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.168657 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/11cba069-c2a4-4b6c-8e3e-38c76d27f20a-client-ca\") pod \"controller-manager-879f6c89f-p582f\" (UID: \"11cba069-c2a4-4b6c-8e3e-38c76d27f20a\") " pod="openshift-controller-manager/controller-manager-879f6c89f-p582f" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.169622 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/11cba069-c2a4-4b6c-8e3e-38c76d27f20a-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-p582f\" (UID: \"11cba069-c2a4-4b6c-8e3e-38c76d27f20a\") " pod="openshift-controller-manager/controller-manager-879f6c89f-p582f" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 
12:22:46.170108 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f4580810-3604-47b5-8a68-d337b0890b48-config\") pod \"apiserver-76f77b778f-z66g4\" (UID: \"f4580810-3604-47b5-8a68-d337b0890b48\") " pod="openshift-apiserver/apiserver-76f77b778f-z66g4" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.171388 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/f4580810-3604-47b5-8a68-d337b0890b48-image-import-ca\") pod \"apiserver-76f77b778f-z66g4\" (UID: \"f4580810-3604-47b5-8a68-d337b0890b48\") " pod="openshift-apiserver/apiserver-76f77b778f-z66g4" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.171733 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-2lfhz"] Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.172414 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-2lfhz" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.172622 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/f4580810-3604-47b5-8a68-d337b0890b48-encryption-config\") pod \"apiserver-76f77b778f-z66g4\" (UID: \"f4580810-3604-47b5-8a68-d337b0890b48\") " pod="openshift-apiserver/apiserver-76f77b778f-z66g4" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.173532 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-wqhm8"] Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.174232 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-wqhm8" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.178553 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/f4580810-3604-47b5-8a68-d337b0890b48-etcd-client\") pod \"apiserver-76f77b778f-z66g4\" (UID: \"f4580810-3604-47b5-8a68-d337b0890b48\") " pod="openshift-apiserver/apiserver-76f77b778f-z66g4" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.180421 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.180610 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-88xml"] Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.181317 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-dnmjr"] Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.181353 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-88xml" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.186259 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-2jxnc"] Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.186846 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-2pmxc"] Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.187242 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-2jxnc" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.187317 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-dnmjr" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.187400 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-k6j67"] Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.188017 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-k6j67" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.188114 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-2pmxc" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.188227 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f4580810-3604-47b5-8a68-d337b0890b48-serving-cert\") pod \"apiserver-76f77b778f-z66g4\" (UID: \"f4580810-3604-47b5-8a68-d337b0890b48\") " pod="openshift-apiserver/apiserver-76f77b778f-z66g4" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.189599 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-l9vch"] Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.190214 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-djckq"] Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.190227 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-l9vch" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.190666 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-djckq" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.191884 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-6tqs2"] Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.192273 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-6tqs2" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.192359 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-dgbs4"] Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.194081 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-dgbs4" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.194358 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-9kzms"] Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.194771 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-9kzms" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.195105 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-hc84k"] Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.195448 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-hc84k" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.195654 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.196050 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-wtzqd"] Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.196267 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/11cba069-c2a4-4b6c-8e3e-38c76d27f20a-serving-cert\") pod \"controller-manager-879f6c89f-p582f\" (UID: \"11cba069-c2a4-4b6c-8e3e-38c76d27f20a\") " pod="openshift-controller-manager/controller-manager-879f6c89f-p582f" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.196405 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-wtzqd" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.196720 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.197441 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-4nrzw"] Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.198175 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-4nrzw" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.198653 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29320575-h29rv"] Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.209068 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-sczzw"] Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.209425 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29320575-h29rv" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.210010 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-z66g4"] Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.210038 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-p582f"] Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.210105 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-sczzw" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.214525 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-57t2m"] Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.215805 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-fp74c"] Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.217195 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.217317 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-trnhx"] Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.218143 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-tdxkp"] Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.220157 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-hns42"] Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.220995 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-ngjm2"] Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.222245 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-8m9sz"] Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.225502 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/dns-default-h52ns"] Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.226117 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-lv4mz"] Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.226186 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/dns-default-h52ns" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.226904 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-n5jzk"] Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.228208 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-znwgl"] Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.229004 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-znwgl" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.229051 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-dnmjr"] Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.231236 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-xnv2t"] Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.231257 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-k6j67"] Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.232565 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-l9vch"] Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.233755 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-wk84c"] Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.234410 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-9kzms"] Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.236344 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-8kz2p"] Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.237084 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-pzv7v"] Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.238056 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.238126 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-2pmxc"] Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.239135 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-79sft"] Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.242150 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-2lfhz"] Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.244535 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-canary/ingress-canary-2fqm7"] Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.245231 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-6w5w8"] Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.245417 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ingress-canary/ingress-canary-2fqm7" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.245729 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-wqhm8"] Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.246707 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-hc84k"] Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.247806 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-v6kxk"] Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.248778 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-server-rps48"] Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.249219 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-rps48" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.249824 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-88xml"] Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.251496 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-djckq"] Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.253097 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-6tqs2"] Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.254666 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-zfcl9"] Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.256408 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-dgbs4"] Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.257156 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.258205 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-2jxnc"] Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.259802 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-wtzqd"] Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.261318 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-4nrzw"] Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.262751 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-znwgl"] Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.264406 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-h52ns"] Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.265844 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29320575-h29rv"] Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.267065 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ndmfz\" (UniqueName: \"kubernetes.io/projected/962ecf1e-f7d6-46ca-9846-0f2c84d71b54-kube-api-access-ndmfz\") pod 
\"openshift-apiserver-operator-796bbdcf4f-trnhx\" (UID: \"962ecf1e-f7d6-46ca-9846-0f2c84d71b54\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-trnhx" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.267111 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/b9be5683-0681-4c81-a534-a5f997ccde65-etcd-client\") pod \"apiserver-7bbb656c7d-pzv7v\" (UID: \"b9be5683-0681-4c81-a534-a5f997ccde65\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-pzv7v" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.267153 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/34704ef4-bb3a-4e40-ac9e-7543c634d17f-serving-cert\") pod \"console-operator-58897d9998-8m9sz\" (UID: \"34704ef4-bb3a-4e40-ac9e-7543c634d17f\") " pod="openshift-console-operator/console-operator-58897d9998-8m9sz" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.267175 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b9be5683-0681-4c81-a534-a5f997ccde65-serving-cert\") pod \"apiserver-7bbb656c7d-pzv7v\" (UID: \"b9be5683-0681-4c81-a534-a5f997ccde65\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-pzv7v" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.267198 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/b9be5683-0681-4c81-a534-a5f997ccde65-audit-dir\") pod \"apiserver-7bbb656c7d-pzv7v\" (UID: \"b9be5683-0681-4c81-a534-a5f997ccde65\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-pzv7v" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.267223 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/34704ef4-bb3a-4e40-ac9e-7543c634d17f-trusted-ca\") pod \"console-operator-58897d9998-8m9sz\" (UID: \"34704ef4-bb3a-4e40-ac9e-7543c634d17f\") " pod="openshift-console-operator/console-operator-58897d9998-8m9sz" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.267245 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gxlls\" (UniqueName: \"kubernetes.io/projected/b9be5683-0681-4c81-a534-a5f997ccde65-kube-api-access-gxlls\") pod \"apiserver-7bbb656c7d-pzv7v\" (UID: \"b9be5683-0681-4c81-a534-a5f997ccde65\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-pzv7v" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.267274 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gpgf4\" (UniqueName: \"kubernetes.io/projected/caaf9351-4cc8-41c4-8ebc-80e0243f23ad-kube-api-access-gpgf4\") pod \"openshift-controller-manager-operator-756b6f6bc6-8kz2p\" (UID: \"caaf9351-4cc8-41c4-8ebc-80e0243f23ad\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-8kz2p" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.267300 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b9be5683-0681-4c81-a534-a5f997ccde65-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-pzv7v\" (UID: \"b9be5683-0681-4c81-a534-a5f997ccde65\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-pzv7v" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.267319 5002 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8978cad0-f5e7-4cd8-a862-225087df083e-config\") pod \"machine-approver-56656f9798-h7jgw\" (UID: \"8978cad0-f5e7-4cd8-a862-225087df083e\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-h7jgw" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.267342 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/962ecf1e-f7d6-46ca-9846-0f2c84d71b54-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-trnhx\" (UID: \"962ecf1e-f7d6-46ca-9846-0f2c84d71b54\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-trnhx" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.267382 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/b9be5683-0681-4c81-a534-a5f997ccde65-encryption-config\") pod \"apiserver-7bbb656c7d-pzv7v\" (UID: \"b9be5683-0681-4c81-a534-a5f997ccde65\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-pzv7v" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.267402 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/b9be5683-0681-4c81-a534-a5f997ccde65-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-pzv7v\" (UID: \"b9be5683-0681-4c81-a534-a5f997ccde65\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-pzv7v" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.267431 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/34704ef4-bb3a-4e40-ac9e-7543c634d17f-config\") pod \"console-operator-58897d9998-8m9sz\" (UID: \"34704ef4-bb3a-4e40-ac9e-7543c634d17f\") " pod="openshift-console-operator/console-operator-58897d9998-8m9sz" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.267451 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/b9be5683-0681-4c81-a534-a5f997ccde65-audit-policies\") pod \"apiserver-7bbb656c7d-pzv7v\" (UID: \"b9be5683-0681-4c81-a534-a5f997ccde65\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-pzv7v" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.267493 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d96zp\" (UniqueName: \"kubernetes.io/projected/34704ef4-bb3a-4e40-ac9e-7543c634d17f-kube-api-access-d96zp\") pod \"console-operator-58897d9998-8m9sz\" (UID: \"34704ef4-bb3a-4e40-ac9e-7543c634d17f\") " pod="openshift-console-operator/console-operator-58897d9998-8m9sz" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.267503 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/b9be5683-0681-4c81-a534-a5f997ccde65-audit-dir\") pod \"apiserver-7bbb656c7d-pzv7v\" (UID: \"b9be5683-0681-4c81-a534-a5f997ccde65\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-pzv7v" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.267516 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7bb85\" (UniqueName: \"kubernetes.io/projected/8978cad0-f5e7-4cd8-a862-225087df083e-kube-api-access-7bb85\") pod \"machine-approver-56656f9798-h7jgw\" (UID: 
\"8978cad0-f5e7-4cd8-a862-225087df083e\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-h7jgw" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.269403 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/b9be5683-0681-4c81-a534-a5f997ccde65-audit-policies\") pod \"apiserver-7bbb656c7d-pzv7v\" (UID: \"b9be5683-0681-4c81-a534-a5f997ccde65\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-pzv7v" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.269428 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/b9be5683-0681-4c81-a534-a5f997ccde65-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-pzv7v\" (UID: \"b9be5683-0681-4c81-a534-a5f997ccde65\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-pzv7v" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.268907 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/34704ef4-bb3a-4e40-ac9e-7543c634d17f-trusted-ca\") pod \"console-operator-58897d9998-8m9sz\" (UID: \"34704ef4-bb3a-4e40-ac9e-7543c634d17f\") " pod="openshift-console-operator/console-operator-58897d9998-8m9sz" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.269124 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b9be5683-0681-4c81-a534-a5f997ccde65-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-pzv7v\" (UID: \"b9be5683-0681-4c81-a534-a5f997ccde65\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-pzv7v" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.269275 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-sczzw"] Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.268640 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8978cad0-f5e7-4cd8-a862-225087df083e-config\") pod \"machine-approver-56656f9798-h7jgw\" (UID: \"8978cad0-f5e7-4cd8-a862-225087df083e\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-h7jgw" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.269321 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/8978cad0-f5e7-4cd8-a862-225087df083e-auth-proxy-config\") pod \"machine-approver-56656f9798-h7jgw\" (UID: \"8978cad0-f5e7-4cd8-a862-225087df083e\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-h7jgw" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.269570 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/caaf9351-4cc8-41c4-8ebc-80e0243f23ad-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-8kz2p\" (UID: \"caaf9351-4cc8-41c4-8ebc-80e0243f23ad\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-8kz2p" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.269618 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/962ecf1e-f7d6-46ca-9846-0f2c84d71b54-config\") pod \"openshift-apiserver-operator-796bbdcf4f-trnhx\" (UID: \"962ecf1e-f7d6-46ca-9846-0f2c84d71b54\") " 
pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-trnhx" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.269641 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/caaf9351-4cc8-41c4-8ebc-80e0243f23ad-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-8kz2p\" (UID: \"caaf9351-4cc8-41c4-8ebc-80e0243f23ad\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-8kz2p" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.269669 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/8978cad0-f5e7-4cd8-a862-225087df083e-machine-approver-tls\") pod \"machine-approver-56656f9798-h7jgw\" (UID: \"8978cad0-f5e7-4cd8-a862-225087df083e\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-h7jgw" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.269943 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b9be5683-0681-4c81-a534-a5f997ccde65-serving-cert\") pod \"apiserver-7bbb656c7d-pzv7v\" (UID: \"b9be5683-0681-4c81-a534-a5f997ccde65\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-pzv7v" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.270047 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/b9be5683-0681-4c81-a534-a5f997ccde65-encryption-config\") pod \"apiserver-7bbb656c7d-pzv7v\" (UID: \"b9be5683-0681-4c81-a534-a5f997ccde65\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-pzv7v" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.270738 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/962ecf1e-f7d6-46ca-9846-0f2c84d71b54-config\") pod \"openshift-apiserver-operator-796bbdcf4f-trnhx\" (UID: \"962ecf1e-f7d6-46ca-9846-0f2c84d71b54\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-trnhx" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.270777 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/caaf9351-4cc8-41c4-8ebc-80e0243f23ad-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-8kz2p\" (UID: \"caaf9351-4cc8-41c4-8ebc-80e0243f23ad\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-8kz2p" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.271100 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-2fqm7"] Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.271144 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/8978cad0-f5e7-4cd8-a862-225087df083e-auth-proxy-config\") pod \"machine-approver-56656f9798-h7jgw\" (UID: \"8978cad0-f5e7-4cd8-a862-225087df083e\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-h7jgw" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.271455 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/34704ef4-bb3a-4e40-ac9e-7543c634d17f-config\") pod \"console-operator-58897d9998-8m9sz\" (UID: \"34704ef4-bb3a-4e40-ac9e-7543c634d17f\") " 
pod="openshift-console-operator/console-operator-58897d9998-8m9sz" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.271495 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/34704ef4-bb3a-4e40-ac9e-7543c634d17f-serving-cert\") pod \"console-operator-58897d9998-8m9sz\" (UID: \"34704ef4-bb3a-4e40-ac9e-7543c634d17f\") " pod="openshift-console-operator/console-operator-58897d9998-8m9sz" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.272357 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/caaf9351-4cc8-41c4-8ebc-80e0243f23ad-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-8kz2p\" (UID: \"caaf9351-4cc8-41c4-8ebc-80e0243f23ad\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-8kz2p" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.272572 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/b9be5683-0681-4c81-a534-a5f997ccde65-etcd-client\") pod \"apiserver-7bbb656c7d-pzv7v\" (UID: \"b9be5683-0681-4c81-a534-a5f997ccde65\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-pzv7v" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.273220 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/8978cad0-f5e7-4cd8-a862-225087df083e-machine-approver-tls\") pod \"machine-approver-56656f9798-h7jgw\" (UID: \"8978cad0-f5e7-4cd8-a862-225087df083e\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-h7jgw" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.284809 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/962ecf1e-f7d6-46ca-9846-0f2c84d71b54-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-trnhx\" (UID: \"962ecf1e-f7d6-46ca-9846-0f2c84d71b54\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-trnhx" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.296906 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.317382 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.337950 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.358017 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.398052 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.416886 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.437364 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86" Sep 30 12:22:46 crc kubenswrapper[5002]: 
I0930 12:22:46.457561 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.477383 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.497427 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.516847 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.537845 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.557327 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.577555 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.597999 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.618336 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.638163 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.658833 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.678132 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.724765 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4c29m\" (UniqueName: \"kubernetes.io/projected/11cba069-c2a4-4b6c-8e3e-38c76d27f20a-kube-api-access-4c29m\") pod \"controller-manager-879f6c89f-p582f\" (UID: \"11cba069-c2a4-4b6c-8e3e-38c76d27f20a\") " pod="openshift-controller-manager/controller-manager-879f6c89f-p582f" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.737717 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.743042 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wwtln\" (UniqueName: \"kubernetes.io/projected/f4580810-3604-47b5-8a68-d337b0890b48-kube-api-access-wwtln\") pod \"apiserver-76f77b778f-z66g4\" (UID: \"f4580810-3604-47b5-8a68-d337b0890b48\") " pod="openshift-apiserver/apiserver-76f77b778f-z66g4" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.756525 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images" Sep 30 12:22:46 crc 
kubenswrapper[5002]: I0930 12:22:46.780034 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.797170 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.817441 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.837296 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.865632 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.877091 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.897267 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.918033 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.938806 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.939591 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-z66g4" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.958790 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.977968 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.982302 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-p582f" Sep 30 12:22:46 crc kubenswrapper[5002]: I0930 12:22:46.999122 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config" Sep 30 12:22:47 crc kubenswrapper[5002]: I0930 12:22:47.018449 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d" Sep 30 12:22:47 crc kubenswrapper[5002]: I0930 12:22:47.039302 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert" Sep 30 12:22:47 crc kubenswrapper[5002]: I0930 12:22:47.058130 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt" Sep 30 12:22:47 crc kubenswrapper[5002]: I0930 12:22:47.077373 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt" Sep 30 12:22:47 crc kubenswrapper[5002]: I0930 12:22:47.096859 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt" Sep 30 12:22:47 crc kubenswrapper[5002]: I0930 12:22:47.119011 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r" Sep 30 12:22:47 crc kubenswrapper[5002]: I0930 12:22:47.137458 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert" Sep 30 12:22:47 crc kubenswrapper[5002]: I0930 12:22:47.158762 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt" Sep 30 12:22:47 crc kubenswrapper[5002]: I0930 12:22:47.177892 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config" Sep 30 12:22:47 crc kubenswrapper[5002]: I0930 12:22:47.197437 5002 request.go:700] Waited for 1.005733981s due to client-side throttling, not priority and fairness, request: GET:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-machine-api/secrets?fieldSelector=metadata.name%3Dcontrol-plane-machine-set-operator-dockercfg-k9rxt&limit=500&resourceVersion=0 Sep 30 12:22:47 crc kubenswrapper[5002]: I0930 12:22:47.199374 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt" Sep 30 12:22:47 crc kubenswrapper[5002]: I0930 12:22:47.218569 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls" Sep 30 12:22:47 crc kubenswrapper[5002]: I0930 12:22:47.237719 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg" Sep 30 12:22:47 crc kubenswrapper[5002]: I0930 12:22:47.250832 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-z66g4"] Sep 30 12:22:47 crc kubenswrapper[5002]: I0930 12:22:47.257424 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls" Sep 30 12:22:47 crc kubenswrapper[5002]: 
I0930 12:22:47.261212 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-p582f"] Sep 30 12:22:47 crc kubenswrapper[5002]: W0930 12:22:47.268411 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod11cba069_c2a4_4b6c_8e3e_38c76d27f20a.slice/crio-05f186dd2e8f8de6f2d12c0816835780e5703b1dab669f22d00fe43de3fa26ad WatchSource:0}: Error finding container 05f186dd2e8f8de6f2d12c0816835780e5703b1dab669f22d00fe43de3fa26ad: Status 404 returned error can't find the container with id 05f186dd2e8f8de6f2d12c0816835780e5703b1dab669f22d00fe43de3fa26ad Sep 30 12:22:47 crc kubenswrapper[5002]: I0930 12:22:47.278092 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt" Sep 30 12:22:47 crc kubenswrapper[5002]: I0930 12:22:47.297751 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx" Sep 30 12:22:47 crc kubenswrapper[5002]: I0930 12:22:47.316983 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert" Sep 30 12:22:47 crc kubenswrapper[5002]: I0930 12:22:47.337785 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert" Sep 30 12:22:47 crc kubenswrapper[5002]: I0930 12:22:47.357699 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf" Sep 30 12:22:47 crc kubenswrapper[5002]: I0930 12:22:47.377788 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt" Sep 30 12:22:47 crc kubenswrapper[5002]: I0930 12:22:47.397583 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5" Sep 30 12:22:47 crc kubenswrapper[5002]: I0930 12:22:47.416880 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls" Sep 30 12:22:47 crc kubenswrapper[5002]: I0930 12:22:47.437174 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt" Sep 30 12:22:47 crc kubenswrapper[5002]: I0930 12:22:47.458404 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert" Sep 30 12:22:47 crc kubenswrapper[5002]: I0930 12:22:47.478175 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt" Sep 30 12:22:47 crc kubenswrapper[5002]: I0930 12:22:47.498218 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics" Sep 30 12:22:47 crc kubenswrapper[5002]: I0930 12:22:47.517579 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg" Sep 30 12:22:47 crc kubenswrapper[5002]: I0930 12:22:47.543505 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca" Sep 30 12:22:47 crc kubenswrapper[5002]: I0930 12:22:47.549337 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-p582f" 
event={"ID":"11cba069-c2a4-4b6c-8e3e-38c76d27f20a","Type":"ContainerStarted","Data":"65e3484e040003c7a1eeda15c15f9e3e3a72be6329f94876d35c259e81a3ce25"} Sep 30 12:22:47 crc kubenswrapper[5002]: I0930 12:22:47.549376 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-p582f" event={"ID":"11cba069-c2a4-4b6c-8e3e-38c76d27f20a","Type":"ContainerStarted","Data":"05f186dd2e8f8de6f2d12c0816835780e5703b1dab669f22d00fe43de3fa26ad"} Sep 30 12:22:47 crc kubenswrapper[5002]: I0930 12:22:47.549616 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-879f6c89f-p582f" Sep 30 12:22:47 crc kubenswrapper[5002]: I0930 12:22:47.551164 5002 generic.go:334] "Generic (PLEG): container finished" podID="f4580810-3604-47b5-8a68-d337b0890b48" containerID="57801394a380106f0493ee8eec3b0ae9307f5478d528751111fe77622fee147f" exitCode=0 Sep 30 12:22:47 crc kubenswrapper[5002]: I0930 12:22:47.551447 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-z66g4" event={"ID":"f4580810-3604-47b5-8a68-d337b0890b48","Type":"ContainerDied","Data":"57801394a380106f0493ee8eec3b0ae9307f5478d528751111fe77622fee147f"} Sep 30 12:22:47 crc kubenswrapper[5002]: I0930 12:22:47.551641 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-z66g4" event={"ID":"f4580810-3604-47b5-8a68-d337b0890b48","Type":"ContainerStarted","Data":"313e1ced7c61d86044ce5978689663c2a7b174f8113e57a2abc7d55babd2d031"} Sep 30 12:22:47 crc kubenswrapper[5002]: I0930 12:22:47.551697 5002 patch_prober.go:28] interesting pod/controller-manager-879f6c89f-p582f container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.6:8443/healthz\": dial tcp 10.217.0.6:8443: connect: connection refused" start-of-body= Sep 30 12:22:47 crc kubenswrapper[5002]: I0930 12:22:47.551934 5002 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-879f6c89f-p582f" podUID="11cba069-c2a4-4b6c-8e3e-38c76d27f20a" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.6:8443/healthz\": dial tcp 10.217.0.6:8443: connect: connection refused" Sep 30 12:22:47 crc kubenswrapper[5002]: I0930 12:22:47.557036 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt" Sep 30 12:22:47 crc kubenswrapper[5002]: I0930 12:22:47.577357 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config" Sep 30 12:22:47 crc kubenswrapper[5002]: I0930 12:22:47.598228 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl" Sep 30 12:22:47 crc kubenswrapper[5002]: I0930 12:22:47.618178 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert" Sep 30 12:22:47 crc kubenswrapper[5002]: I0930 12:22:47.637808 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt" Sep 30 12:22:47 crc kubenswrapper[5002]: I0930 12:22:47.657450 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt" Sep 30 12:22:47 crc kubenswrapper[5002]: I0930 12:22:47.677890 5002 
reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt" Sep 30 12:22:47 crc kubenswrapper[5002]: I0930 12:22:47.697230 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c" Sep 30 12:22:47 crc kubenswrapper[5002]: I0930 12:22:47.717612 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key" Sep 30 12:22:47 crc kubenswrapper[5002]: I0930 12:22:47.738224 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle" Sep 30 12:22:47 crc kubenswrapper[5002]: I0930 12:22:47.757283 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt" Sep 30 12:22:47 crc kubenswrapper[5002]: I0930 12:22:47.777663 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Sep 30 12:22:47 crc kubenswrapper[5002]: I0930 12:22:47.798152 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Sep 30 12:22:47 crc kubenswrapper[5002]: I0930 12:22:47.817606 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert" Sep 30 12:22:47 crc kubenswrapper[5002]: I0930 12:22:47.838368 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default" Sep 30 12:22:47 crc kubenswrapper[5002]: I0930 12:22:47.857412 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh" Sep 30 12:22:47 crc kubenswrapper[5002]: I0930 12:22:47.877792 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls" Sep 30 12:22:47 crc kubenswrapper[5002]: I0930 12:22:47.897729 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt" Sep 30 12:22:47 crc kubenswrapper[5002]: I0930 12:22:47.917452 5002 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k" Sep 30 12:22:47 crc kubenswrapper[5002]: I0930 12:22:47.938268 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt" Sep 30 12:22:47 crc kubenswrapper[5002]: I0930 12:22:47.957874 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt" Sep 30 12:22:47 crc kubenswrapper[5002]: I0930 12:22:47.978400 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt" Sep 30 12:22:47 crc kubenswrapper[5002]: I0930 12:22:47.998709 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.017224 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.037674 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.058841 5002 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.077341 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.120395 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ndmfz\" (UniqueName: \"kubernetes.io/projected/962ecf1e-f7d6-46ca-9846-0f2c84d71b54-kube-api-access-ndmfz\") pod \"openshift-apiserver-operator-796bbdcf4f-trnhx\" (UID: \"962ecf1e-f7d6-46ca-9846-0f2c84d71b54\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-trnhx" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.139043 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gxlls\" (UniqueName: \"kubernetes.io/projected/b9be5683-0681-4c81-a534-a5f997ccde65-kube-api-access-gxlls\") pod \"apiserver-7bbb656c7d-pzv7v\" (UID: \"b9be5683-0681-4c81-a534-a5f997ccde65\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-pzv7v" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.156325 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gpgf4\" (UniqueName: \"kubernetes.io/projected/caaf9351-4cc8-41c4-8ebc-80e0243f23ad-kube-api-access-gpgf4\") pod \"openshift-controller-manager-operator-756b6f6bc6-8kz2p\" (UID: \"caaf9351-4cc8-41c4-8ebc-80e0243f23ad\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-8kz2p" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.181868 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7bb85\" (UniqueName: \"kubernetes.io/projected/8978cad0-f5e7-4cd8-a862-225087df083e-kube-api-access-7bb85\") pod \"machine-approver-56656f9798-h7jgw\" (UID: \"8978cad0-f5e7-4cd8-a862-225087df083e\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-h7jgw" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.195857 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d96zp\" (UniqueName: \"kubernetes.io/projected/34704ef4-bb3a-4e40-ac9e-7543c634d17f-kube-api-access-d96zp\") pod \"console-operator-58897d9998-8m9sz\" (UID: \"34704ef4-bb3a-4e40-ac9e-7543c634d17f\") " pod="openshift-console-operator/console-operator-58897d9998-8m9sz" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.216741 5002 request.go:700] Waited for 1.838380748s due to client-side throttling, not priority and fairness, request: PATCH:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-controller-manager-operator/pods/openshift-controller-manager-operator-756b6f6bc6-8kz2p/status Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.219811 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-pzv7v" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.229723 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-h7jgw" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.248913 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-trnhx" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.291766 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/87454cc1-5175-495b-9551-a19474d51e4a-audit-dir\") pod \"oauth-openshift-558db77b4-wk84c\" (UID: \"87454cc1-5175-495b-9551-a19474d51e4a\") " pod="openshift-authentication/oauth-openshift-558db77b4-wk84c" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.291811 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/87454cc1-5175-495b-9551-a19474d51e4a-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-wk84c\" (UID: \"87454cc1-5175-495b-9551-a19474d51e4a\") " pod="openshift-authentication/oauth-openshift-558db77b4-wk84c" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.292013 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/87454cc1-5175-495b-9551-a19474d51e4a-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-wk84c\" (UID: \"87454cc1-5175-495b-9551-a19474d51e4a\") " pod="openshift-authentication/oauth-openshift-558db77b4-wk84c" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.292081 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/048223bb-4ff9-41e0-9b78-457a720ff399-bound-sa-token\") pod \"image-registry-697d97f7c8-zfcl9\" (UID: \"048223bb-4ff9-41e0-9b78-457a720ff399\") " pod="openshift-image-registry/image-registry-697d97f7c8-zfcl9" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.292241 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/7151e9d9-4417-40bb-aac5-8f838065aa79-trusted-ca-bundle\") pod \"console-f9d7485db-79sft\" (UID: \"7151e9d9-4417-40bb-aac5-8f838065aa79\") " pod="openshift-console/console-f9d7485db-79sft" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.292386 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/048223bb-4ff9-41e0-9b78-457a720ff399-registry-tls\") pod \"image-registry-697d97f7c8-zfcl9\" (UID: \"048223bb-4ff9-41e0-9b78-457a720ff399\") " pod="openshift-image-registry/image-registry-697d97f7c8-zfcl9" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.292430 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/048223bb-4ff9-41e0-9b78-457a720ff399-registry-certificates\") pod \"image-registry-697d97f7c8-zfcl9\" (UID: \"048223bb-4ff9-41e0-9b78-457a720ff399\") " pod="openshift-image-registry/image-registry-697d97f7c8-zfcl9" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.292516 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xtlqs\" (UniqueName: \"kubernetes.io/projected/87454cc1-5175-495b-9551-a19474d51e4a-kube-api-access-xtlqs\") pod \"oauth-openshift-558db77b4-wk84c\" (UID: \"87454cc1-5175-495b-9551-a19474d51e4a\") " 
pod="openshift-authentication/oauth-openshift-558db77b4-wk84c" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.292569 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/7151e9d9-4417-40bb-aac5-8f838065aa79-service-ca\") pod \"console-f9d7485db-79sft\" (UID: \"7151e9d9-4417-40bb-aac5-8f838065aa79\") " pod="openshift-console/console-f9d7485db-79sft" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.292676 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1c0ed680-87c2-438b-aca7-b9fa1c19d414-serving-cert\") pod \"route-controller-manager-6576b87f9c-57t2m\" (UID: \"1c0ed680-87c2-438b-aca7-b9fa1c19d414\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-57t2m" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.292751 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p26xr\" (UniqueName: \"kubernetes.io/projected/f7cb4999-7580-47a2-a3e6-764d49684eaf-kube-api-access-p26xr\") pod \"etcd-operator-b45778765-lv4mz\" (UID: \"f7cb4999-7580-47a2-a3e6-764d49684eaf\") " pod="openshift-etcd-operator/etcd-operator-b45778765-lv4mz" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.293208 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/048223bb-4ff9-41e0-9b78-457a720ff399-trusted-ca\") pod \"image-registry-697d97f7c8-zfcl9\" (UID: \"048223bb-4ff9-41e0-9b78-457a720ff399\") " pod="openshift-image-registry/image-registry-697d97f7c8-zfcl9" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.293244 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/1c0ed680-87c2-438b-aca7-b9fa1c19d414-client-ca\") pod \"route-controller-manager-6576b87f9c-57t2m\" (UID: \"1c0ed680-87c2-438b-aca7-b9fa1c19d414\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-57t2m" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.293288 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zfcl9\" (UID: \"048223bb-4ff9-41e0-9b78-457a720ff399\") " pod="openshift-image-registry/image-registry-697d97f7c8-zfcl9" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.293324 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1c0ed680-87c2-438b-aca7-b9fa1c19d414-config\") pod \"route-controller-manager-6576b87f9c-57t2m\" (UID: \"1c0ed680-87c2-438b-aca7-b9fa1c19d414\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-57t2m" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.293374 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/14158f4f-292f-4693-9ed2-c7fa97452ecb-serving-cert\") pod \"openshift-config-operator-7777fb866f-hns42\" (UID: \"14158f4f-292f-4693-9ed2-c7fa97452ecb\") " 
pod="openshift-config-operator/openshift-config-operator-7777fb866f-hns42" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.293508 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5d8qh\" (UniqueName: \"kubernetes.io/projected/14158f4f-292f-4693-9ed2-c7fa97452ecb-kube-api-access-5d8qh\") pod \"openshift-config-operator-7777fb866f-hns42\" (UID: \"14158f4f-292f-4693-9ed2-c7fa97452ecb\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-hns42" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.293557 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/7151e9d9-4417-40bb-aac5-8f838065aa79-console-oauth-config\") pod \"console-f9d7485db-79sft\" (UID: \"7151e9d9-4417-40bb-aac5-8f838065aa79\") " pod="openshift-console/console-f9d7485db-79sft" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.293577 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/7151e9d9-4417-40bb-aac5-8f838065aa79-oauth-serving-cert\") pod \"console-f9d7485db-79sft\" (UID: \"7151e9d9-4417-40bb-aac5-8f838065aa79\") " pod="openshift-console/console-f9d7485db-79sft" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.293623 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hpnvt\" (UniqueName: \"kubernetes.io/projected/1c0ed680-87c2-438b-aca7-b9fa1c19d414-kube-api-access-hpnvt\") pod \"route-controller-manager-6576b87f9c-57t2m\" (UID: \"1c0ed680-87c2-438b-aca7-b9fa1c19d414\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-57t2m" Sep 30 12:22:48 crc kubenswrapper[5002]: E0930 12:22:48.293635 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 12:22:48.793618696 +0000 UTC m=+143.043300842 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zfcl9" (UID: "048223bb-4ff9-41e0-9b78-457a720ff399") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.293841 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/7151e9d9-4417-40bb-aac5-8f838065aa79-console-config\") pod \"console-f9d7485db-79sft\" (UID: \"7151e9d9-4417-40bb-aac5-8f838065aa79\") " pod="openshift-console/console-f9d7485db-79sft" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.293870 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/14158f4f-292f-4693-9ed2-c7fa97452ecb-available-featuregates\") pod \"openshift-config-operator-7777fb866f-hns42\" (UID: \"14158f4f-292f-4693-9ed2-c7fa97452ecb\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-hns42" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.293885 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/7151e9d9-4417-40bb-aac5-8f838065aa79-console-serving-cert\") pod \"console-f9d7485db-79sft\" (UID: \"7151e9d9-4417-40bb-aac5-8f838065aa79\") " pod="openshift-console/console-f9d7485db-79sft" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.293903 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/87454cc1-5175-495b-9551-a19474d51e4a-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-wk84c\" (UID: \"87454cc1-5175-495b-9551-a19474d51e4a\") " pod="openshift-authentication/oauth-openshift-558db77b4-wk84c" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.293921 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f7cb4999-7580-47a2-a3e6-764d49684eaf-serving-cert\") pod \"etcd-operator-b45778765-lv4mz\" (UID: \"f7cb4999-7580-47a2-a3e6-764d49684eaf\") " pod="openshift-etcd-operator/etcd-operator-b45778765-lv4mz" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.293951 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/87454cc1-5175-495b-9551-a19474d51e4a-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-wk84c\" (UID: \"87454cc1-5175-495b-9551-a19474d51e4a\") " pod="openshift-authentication/oauth-openshift-558db77b4-wk84c" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.293969 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/87454cc1-5175-495b-9551-a19474d51e4a-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-wk84c\" (UID: \"87454cc1-5175-495b-9551-a19474d51e4a\") " 
pod="openshift-authentication/oauth-openshift-558db77b4-wk84c" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.293984 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/325572f4-db9f-40a9-bd48-df93e3ec42ed-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-ngjm2\" (UID: \"325572f4-db9f-40a9-bd48-df93e3ec42ed\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-ngjm2" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.294010 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e536d056-a018-4d58-a85a-24d70c2116fc-serving-cert\") pod \"authentication-operator-69f744f599-xnv2t\" (UID: \"e536d056-a018-4d58-a85a-24d70c2116fc\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-xnv2t" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.294180 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/87454cc1-5175-495b-9551-a19474d51e4a-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-wk84c\" (UID: \"87454cc1-5175-495b-9551-a19474d51e4a\") " pod="openshift-authentication/oauth-openshift-558db77b4-wk84c" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.294229 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/87454cc1-5175-495b-9551-a19474d51e4a-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-wk84c\" (UID: \"87454cc1-5175-495b-9551-a19474d51e4a\") " pod="openshift-authentication/oauth-openshift-558db77b4-wk84c" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.294250 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/f7cb4999-7580-47a2-a3e6-764d49684eaf-etcd-client\") pod \"etcd-operator-b45778765-lv4mz\" (UID: \"f7cb4999-7580-47a2-a3e6-764d49684eaf\") " pod="openshift-etcd-operator/etcd-operator-b45778765-lv4mz" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.294286 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bwmsq\" (UniqueName: \"kubernetes.io/projected/e536d056-a018-4d58-a85a-24d70c2116fc-kube-api-access-bwmsq\") pod \"authentication-operator-69f744f599-xnv2t\" (UID: \"e536d056-a018-4d58-a85a-24d70c2116fc\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-xnv2t" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.294519 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q8dkm\" (UniqueName: \"kubernetes.io/projected/325572f4-db9f-40a9-bd48-df93e3ec42ed-kube-api-access-q8dkm\") pod \"cluster-image-registry-operator-dc59b4c8b-ngjm2\" (UID: \"325572f4-db9f-40a9-bd48-df93e3ec42ed\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-ngjm2" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.294613 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/e536d056-a018-4d58-a85a-24d70c2116fc-service-ca-bundle\") pod 
\"authentication-operator-69f744f599-xnv2t\" (UID: \"e536d056-a018-4d58-a85a-24d70c2116fc\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-xnv2t" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.294644 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/87454cc1-5175-495b-9551-a19474d51e4a-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-wk84c\" (UID: \"87454cc1-5175-495b-9551-a19474d51e4a\") " pod="openshift-authentication/oauth-openshift-558db77b4-wk84c" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.294687 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/325572f4-db9f-40a9-bd48-df93e3ec42ed-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-ngjm2\" (UID: \"325572f4-db9f-40a9-bd48-df93e3ec42ed\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-ngjm2" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.294706 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/048223bb-4ff9-41e0-9b78-457a720ff399-installation-pull-secrets\") pod \"image-registry-697d97f7c8-zfcl9\" (UID: \"048223bb-4ff9-41e0-9b78-457a720ff399\") " pod="openshift-image-registry/image-registry-697d97f7c8-zfcl9" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.294722 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f7cb4999-7580-47a2-a3e6-764d49684eaf-config\") pod \"etcd-operator-b45778765-lv4mz\" (UID: \"f7cb4999-7580-47a2-a3e6-764d49684eaf\") " pod="openshift-etcd-operator/etcd-operator-b45778765-lv4mz" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.294775 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vgd9l\" (UniqueName: \"kubernetes.io/projected/048223bb-4ff9-41e0-9b78-457a720ff399-kube-api-access-vgd9l\") pod \"image-registry-697d97f7c8-zfcl9\" (UID: \"048223bb-4ff9-41e0-9b78-457a720ff399\") " pod="openshift-image-registry/image-registry-697d97f7c8-zfcl9" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.294791 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m2mv7\" (UniqueName: \"kubernetes.io/projected/ccd47538-6f91-4c6d-91b0-afccf0c83b20-kube-api-access-m2mv7\") pod \"machine-api-operator-5694c8668f-fp74c\" (UID: \"ccd47538-6f91-4c6d-91b0-afccf0c83b20\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-fp74c" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.294826 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/87454cc1-5175-495b-9551-a19474d51e4a-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-wk84c\" (UID: \"87454cc1-5175-495b-9551-a19474d51e4a\") " pod="openshift-authentication/oauth-openshift-558db77b4-wk84c" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.294844 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k7rp6\" (UniqueName: 
\"kubernetes.io/projected/54790eef-93bc-4d88-bcc5-3a2f6e1c1fa5-kube-api-access-k7rp6\") pod \"downloads-7954f5f757-tdxkp\" (UID: \"54790eef-93bc-4d88-bcc5-3a2f6e1c1fa5\") " pod="openshift-console/downloads-7954f5f757-tdxkp" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.294864 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/f7cb4999-7580-47a2-a3e6-764d49684eaf-etcd-ca\") pod \"etcd-operator-b45778765-lv4mz\" (UID: \"f7cb4999-7580-47a2-a3e6-764d49684eaf\") " pod="openshift-etcd-operator/etcd-operator-b45778765-lv4mz" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.294896 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/f7cb4999-7580-47a2-a3e6-764d49684eaf-etcd-service-ca\") pod \"etcd-operator-b45778765-lv4mz\" (UID: \"f7cb4999-7580-47a2-a3e6-764d49684eaf\") " pod="openshift-etcd-operator/etcd-operator-b45778765-lv4mz" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.294926 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/ccd47538-6f91-4c6d-91b0-afccf0c83b20-images\") pod \"machine-api-operator-5694c8668f-fp74c\" (UID: \"ccd47538-6f91-4c6d-91b0-afccf0c83b20\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-fp74c" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.294972 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/87454cc1-5175-495b-9551-a19474d51e4a-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-wk84c\" (UID: \"87454cc1-5175-495b-9551-a19474d51e4a\") " pod="openshift-authentication/oauth-openshift-558db77b4-wk84c" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.295034 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e536d056-a018-4d58-a85a-24d70c2116fc-config\") pod \"authentication-operator-69f744f599-xnv2t\" (UID: \"e536d056-a018-4d58-a85a-24d70c2116fc\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-xnv2t" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.295079 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ccd47538-6f91-4c6d-91b0-afccf0c83b20-config\") pod \"machine-api-operator-5694c8668f-fp74c\" (UID: \"ccd47538-6f91-4c6d-91b0-afccf0c83b20\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-fp74c" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.295107 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/048223bb-4ff9-41e0-9b78-457a720ff399-ca-trust-extracted\") pod \"image-registry-697d97f7c8-zfcl9\" (UID: \"048223bb-4ff9-41e0-9b78-457a720ff399\") " pod="openshift-image-registry/image-registry-697d97f7c8-zfcl9" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.295129 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/325572f4-db9f-40a9-bd48-df93e3ec42ed-bound-sa-token\") pod 
\"cluster-image-registry-operator-dc59b4c8b-ngjm2\" (UID: \"325572f4-db9f-40a9-bd48-df93e3ec42ed\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-ngjm2" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.295147 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/e536d056-a018-4d58-a85a-24d70c2116fc-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-xnv2t\" (UID: \"e536d056-a018-4d58-a85a-24d70c2116fc\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-xnv2t" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.295165 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qc7nq\" (UniqueName: \"kubernetes.io/projected/7151e9d9-4417-40bb-aac5-8f838065aa79-kube-api-access-qc7nq\") pod \"console-f9d7485db-79sft\" (UID: \"7151e9d9-4417-40bb-aac5-8f838065aa79\") " pod="openshift-console/console-f9d7485db-79sft" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.295181 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/87454cc1-5175-495b-9551-a19474d51e4a-audit-policies\") pod \"oauth-openshift-558db77b4-wk84c\" (UID: \"87454cc1-5175-495b-9551-a19474d51e4a\") " pod="openshift-authentication/oauth-openshift-558db77b4-wk84c" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.295226 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/ccd47538-6f91-4c6d-91b0-afccf0c83b20-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-fp74c\" (UID: \"ccd47538-6f91-4c6d-91b0-afccf0c83b20\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-fp74c" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.295249 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/87454cc1-5175-495b-9551-a19474d51e4a-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-wk84c\" (UID: \"87454cc1-5175-495b-9551-a19474d51e4a\") " pod="openshift-authentication/oauth-openshift-558db77b4-wk84c" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.360672 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-8m9sz" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.365721 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-8kz2p" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.396729 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.396891 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/364932b8-9721-41da-a1a3-7b9a1977da84-config\") pod \"kube-controller-manager-operator-78b949d7b-6w5w8\" (UID: \"364932b8-9721-41da-a1a3-7b9a1977da84\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-6w5w8" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.396912 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-79qt4\" (UniqueName: \"kubernetes.io/projected/2c6f790c-415f-46c9-baba-251cdc1e14b2-kube-api-access-79qt4\") pod \"csi-hostpathplugin-znwgl\" (UID: \"2c6f790c-415f-46c9-baba-251cdc1e14b2\") " pod="hostpath-provisioner/csi-hostpathplugin-znwgl" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.396933 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/325572f4-db9f-40a9-bd48-df93e3ec42ed-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-ngjm2\" (UID: \"325572f4-db9f-40a9-bd48-df93e3ec42ed\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-ngjm2" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.396951 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/048223bb-4ff9-41e0-9b78-457a720ff399-installation-pull-secrets\") pod \"image-registry-697d97f7c8-zfcl9\" (UID: \"048223bb-4ff9-41e0-9b78-457a720ff399\") " pod="openshift-image-registry/image-registry-697d97f7c8-zfcl9" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.396966 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/87454cc1-5175-495b-9551-a19474d51e4a-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-wk84c\" (UID: \"87454cc1-5175-495b-9551-a19474d51e4a\") " pod="openshift-authentication/oauth-openshift-558db77b4-wk84c" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.396982 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/f7cb4999-7580-47a2-a3e6-764d49684eaf-etcd-ca\") pod \"etcd-operator-b45778765-lv4mz\" (UID: \"f7cb4999-7580-47a2-a3e6-764d49684eaf\") " pod="openshift-etcd-operator/etcd-operator-b45778765-lv4mz" Sep 30 12:22:48 crc kubenswrapper[5002]: E0930 12:22:48.397026 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 12:22:48.896993916 +0000 UTC m=+143.146676052 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.397069 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/834344ed-4a8b-4a1e-be84-33622d21d0af-default-certificate\") pod \"router-default-5444994796-6rw5f\" (UID: \"834344ed-4a8b-4a1e-be84-33622d21d0af\") " pod="openshift-ingress/router-default-5444994796-6rw5f" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.397137 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/834344ed-4a8b-4a1e-be84-33622d21d0af-stats-auth\") pod \"router-default-5444994796-6rw5f\" (UID: \"834344ed-4a8b-4a1e-be84-33622d21d0af\") " pod="openshift-ingress/router-default-5444994796-6rw5f" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.397158 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/2c6f790c-415f-46c9-baba-251cdc1e14b2-csi-data-dir\") pod \"csi-hostpathplugin-znwgl\" (UID: \"2c6f790c-415f-46c9-baba-251cdc1e14b2\") " pod="hostpath-provisioner/csi-hostpathplugin-znwgl" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.397180 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/87454cc1-5175-495b-9551-a19474d51e4a-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-wk84c\" (UID: \"87454cc1-5175-495b-9551-a19474d51e4a\") " pod="openshift-authentication/oauth-openshift-558db77b4-wk84c" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.397199 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/ccd47538-6f91-4c6d-91b0-afccf0c83b20-images\") pod \"machine-api-operator-5694c8668f-fp74c\" (UID: \"ccd47538-6f91-4c6d-91b0-afccf0c83b20\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-fp74c" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.397217 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/364932b8-9721-41da-a1a3-7b9a1977da84-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-6w5w8\" (UID: \"364932b8-9721-41da-a1a3-7b9a1977da84\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-6w5w8" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.397232 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ccd47538-6f91-4c6d-91b0-afccf0c83b20-config\") pod \"machine-api-operator-5694c8668f-fp74c\" (UID: \"ccd47538-6f91-4c6d-91b0-afccf0c83b20\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-fp74c" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.397250 5002 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fmbjf\" (UniqueName: \"kubernetes.io/projected/846fc575-9b63-46b3-bd9d-dfc8ab0bf7a2-kube-api-access-fmbjf\") pod \"service-ca-operator-777779d784-wtzqd\" (UID: \"846fc575-9b63-46b3-bd9d-dfc8ab0bf7a2\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-wtzqd" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.397268 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/c53fef56-513a-439c-a05a-cda26f9e3855-trusted-ca\") pod \"ingress-operator-5b745b69d9-wqhm8\" (UID: \"c53fef56-513a-439c-a05a-cda26f9e3855\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-wqhm8" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.397289 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/325572f4-db9f-40a9-bd48-df93e3ec42ed-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-ngjm2\" (UID: \"325572f4-db9f-40a9-bd48-df93e3ec42ed\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-ngjm2" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.397309 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/dfee3432-722f-454a-bd40-36fcfe1fb935-apiservice-cert\") pod \"packageserver-d55dfcdfc-sczzw\" (UID: \"dfee3432-722f-454a-bd40-36fcfe1fb935\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-sczzw" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.397328 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/87454cc1-5175-495b-9551-a19474d51e4a-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-wk84c\" (UID: \"87454cc1-5175-495b-9551-a19474d51e4a\") " pod="openshift-authentication/oauth-openshift-558db77b4-wk84c" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.397365 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/87454cc1-5175-495b-9551-a19474d51e4a-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-wk84c\" (UID: \"87454cc1-5175-495b-9551-a19474d51e4a\") " pod="openshift-authentication/oauth-openshift-558db77b4-wk84c" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.397381 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/cae8a060-bc60-476a-9bee-11637282a23f-config-volume\") pod \"dns-default-h52ns\" (UID: \"cae8a060-bc60-476a-9bee-11637282a23f\") " pod="openshift-dns/dns-default-h52ns" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.397400 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/048223bb-4ff9-41e0-9b78-457a720ff399-bound-sa-token\") pod \"image-registry-697d97f7c8-zfcl9\" (UID: \"048223bb-4ff9-41e0-9b78-457a720ff399\") " pod="openshift-image-registry/image-registry-697d97f7c8-zfcl9" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.397417 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jpjhv\" (UniqueName: 
\"kubernetes.io/projected/89d8a136-01a4-487f-8160-9a0cce584cec-kube-api-access-jpjhv\") pod \"migrator-59844c95c7-l9vch\" (UID: \"89d8a136-01a4-487f-8160-9a0cce584cec\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-l9vch" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.397437 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jvwsk\" (UniqueName: \"kubernetes.io/projected/dfee3432-722f-454a-bd40-36fcfe1fb935-kube-api-access-jvwsk\") pod \"packageserver-d55dfcdfc-sczzw\" (UID: \"dfee3432-722f-454a-bd40-36fcfe1fb935\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-sczzw" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.397488 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/0eff3533-ad12-4c4b-bd1e-6a92d09d2835-signing-key\") pod \"service-ca-9c57cc56f-4nrzw\" (UID: \"0eff3533-ad12-4c4b-bd1e-6a92d09d2835\") " pod="openshift-service-ca/service-ca-9c57cc56f-4nrzw" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.397581 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/7151e9d9-4417-40bb-aac5-8f838065aa79-trusted-ca-bundle\") pod \"console-f9d7485db-79sft\" (UID: \"7151e9d9-4417-40bb-aac5-8f838065aa79\") " pod="openshift-console/console-f9d7485db-79sft" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.397599 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ml679\" (UniqueName: \"kubernetes.io/projected/1fa6d0bc-ffc5-4974-bb50-123dd96187bd-kube-api-access-ml679\") pod \"machine-config-controller-84d6567774-djckq\" (UID: \"1fa6d0bc-ffc5-4974-bb50-123dd96187bd\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-djckq" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.397617 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/2c6f790c-415f-46c9-baba-251cdc1e14b2-mountpoint-dir\") pod \"csi-hostpathplugin-znwgl\" (UID: \"2c6f790c-415f-46c9-baba-251cdc1e14b2\") " pod="hostpath-provisioner/csi-hostpathplugin-znwgl" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.397634 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/048223bb-4ff9-41e0-9b78-457a720ff399-registry-tls\") pod \"image-registry-697d97f7c8-zfcl9\" (UID: \"048223bb-4ff9-41e0-9b78-457a720ff399\") " pod="openshift-image-registry/image-registry-697d97f7c8-zfcl9" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.397649 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/048223bb-4ff9-41e0-9b78-457a720ff399-registry-certificates\") pod \"image-registry-697d97f7c8-zfcl9\" (UID: \"048223bb-4ff9-41e0-9b78-457a720ff399\") " pod="openshift-image-registry/image-registry-697d97f7c8-zfcl9" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.397653 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/f7cb4999-7580-47a2-a3e6-764d49684eaf-etcd-ca\") pod \"etcd-operator-b45778765-lv4mz\" (UID: \"f7cb4999-7580-47a2-a3e6-764d49684eaf\") " 
pod="openshift-etcd-operator/etcd-operator-b45778765-lv4mz" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.397663 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xtlqs\" (UniqueName: \"kubernetes.io/projected/87454cc1-5175-495b-9551-a19474d51e4a-kube-api-access-xtlqs\") pod \"oauth-openshift-558db77b4-wk84c\" (UID: \"87454cc1-5175-495b-9551-a19474d51e4a\") " pod="openshift-authentication/oauth-openshift-558db77b4-wk84c" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.397679 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/5f1cdbf0-65a0-4621-a4de-2337b8151bd3-metrics-tls\") pod \"dns-operator-744455d44c-dgbs4\" (UID: \"5f1cdbf0-65a0-4621-a4de-2337b8151bd3\") " pod="openshift-dns-operator/dns-operator-744455d44c-dgbs4" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.398598 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/87454cc1-5175-495b-9551-a19474d51e4a-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-wk84c\" (UID: \"87454cc1-5175-495b-9551-a19474d51e4a\") " pod="openshift-authentication/oauth-openshift-558db77b4-wk84c" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.399143 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/ccd47538-6f91-4c6d-91b0-afccf0c83b20-images\") pod \"machine-api-operator-5694c8668f-fp74c\" (UID: \"ccd47538-6f91-4c6d-91b0-afccf0c83b20\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-fp74c" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.411292 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/048223bb-4ff9-41e0-9b78-457a720ff399-registry-certificates\") pod \"image-registry-697d97f7c8-zfcl9\" (UID: \"048223bb-4ff9-41e0-9b78-457a720ff399\") " pod="openshift-image-registry/image-registry-697d97f7c8-zfcl9" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.411868 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ccd47538-6f91-4c6d-91b0-afccf0c83b20-config\") pod \"machine-api-operator-5694c8668f-fp74c\" (UID: \"ccd47538-6f91-4c6d-91b0-afccf0c83b20\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-fp74c" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.412213 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/c53fef56-513a-439c-a05a-cda26f9e3855-metrics-tls\") pod \"ingress-operator-5b745b69d9-wqhm8\" (UID: \"c53fef56-513a-439c-a05a-cda26f9e3855\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-wqhm8" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.412235 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/364932b8-9721-41da-a1a3-7b9a1977da84-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-6w5w8\" (UID: \"364932b8-9721-41da-a1a3-7b9a1977da84\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-6w5w8" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.412288 5002 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n459d\" (UniqueName: \"kubernetes.io/projected/796cc797-549b-40ee-8bf9-66a8d9b54703-kube-api-access-n459d\") pod \"catalog-operator-68c6474976-6tqs2\" (UID: \"796cc797-549b-40ee-8bf9-66a8d9b54703\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-6tqs2" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.412307 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lc5bz\" (UniqueName: \"kubernetes.io/projected/f1a5276b-5811-4a19-8d31-a46a1c787d8a-kube-api-access-lc5bz\") pod \"package-server-manager-789f6589d5-2jxnc\" (UID: \"f1a5276b-5811-4a19-8d31-a46a1c787d8a\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-2jxnc" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.412323 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s2rjz\" (UniqueName: \"kubernetes.io/projected/b282f088-14fa-47c0-a8a8-1285a8a7c296-kube-api-access-s2rjz\") pod \"multus-admission-controller-857f4d67dd-88xml\" (UID: \"b282f088-14fa-47c0-a8a8-1285a8a7c296\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-88xml" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.412340 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/fef68bfa-20e4-4a83-8642-be28f0f0b31a-images\") pod \"machine-config-operator-74547568cd-2lfhz\" (UID: \"fef68bfa-20e4-4a83-8642-be28f0f0b31a\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-2lfhz" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.414384 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/7151e9d9-4417-40bb-aac5-8f838065aa79-trusted-ca-bundle\") pod \"console-f9d7485db-79sft\" (UID: \"7151e9d9-4417-40bb-aac5-8f838065aa79\") " pod="openshift-console/console-f9d7485db-79sft" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.415672 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/048223bb-4ff9-41e0-9b78-457a720ff399-trusted-ca\") pod \"image-registry-697d97f7c8-zfcl9\" (UID: \"048223bb-4ff9-41e0-9b78-457a720ff399\") " pod="openshift-image-registry/image-registry-697d97f7c8-zfcl9" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.415700 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/f1a5276b-5811-4a19-8d31-a46a1c787d8a-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-2jxnc\" (UID: \"f1a5276b-5811-4a19-8d31-a46a1c787d8a\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-2jxnc" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.415723 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/cae8a060-bc60-476a-9bee-11637282a23f-metrics-tls\") pod \"dns-default-h52ns\" (UID: \"cae8a060-bc60-476a-9bee-11637282a23f\") " pod="openshift-dns/dns-default-h52ns" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.416663 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zfcl9\" (UID: \"048223bb-4ff9-41e0-9b78-457a720ff399\") " pod="openshift-image-registry/image-registry-697d97f7c8-zfcl9" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.416738 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1c0ed680-87c2-438b-aca7-b9fa1c19d414-config\") pod \"route-controller-manager-6576b87f9c-57t2m\" (UID: \"1c0ed680-87c2-438b-aca7-b9fa1c19d414\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-57t2m" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.416759 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/b282f088-14fa-47c0-a8a8-1285a8a7c296-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-88xml\" (UID: \"b282f088-14fa-47c0-a8a8-1285a8a7c296\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-88xml" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.416795 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hpnvt\" (UniqueName: \"kubernetes.io/projected/1c0ed680-87c2-438b-aca7-b9fa1c19d414-kube-api-access-hpnvt\") pod \"route-controller-manager-6576b87f9c-57t2m\" (UID: \"1c0ed680-87c2-438b-aca7-b9fa1c19d414\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-57t2m" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.416812 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/bae0075d-69e4-48a2-91f2-a6e2df1529f0-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-v6kxk\" (UID: \"bae0075d-69e4-48a2-91f2-a6e2df1529f0\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-v6kxk" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.416856 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/7151e9d9-4417-40bb-aac5-8f838065aa79-console-config\") pod \"console-f9d7485db-79sft\" (UID: \"7151e9d9-4417-40bb-aac5-8f838065aa79\") " pod="openshift-console/console-f9d7485db-79sft" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.416873 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hns4t\" (UniqueName: \"kubernetes.io/projected/c53fef56-513a-439c-a05a-cda26f9e3855-kube-api-access-hns4t\") pod \"ingress-operator-5b745b69d9-wqhm8\" (UID: \"c53fef56-513a-439c-a05a-cda26f9e3855\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-wqhm8" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.416945 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/87454cc1-5175-495b-9551-a19474d51e4a-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-wk84c\" (UID: \"87454cc1-5175-495b-9551-a19474d51e4a\") " pod="openshift-authentication/oauth-openshift-558db77b4-wk84c" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.416964 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/2c6f790c-415f-46c9-baba-251cdc1e14b2-plugins-dir\") pod \"csi-hostpathplugin-znwgl\" (UID: \"2c6f790c-415f-46c9-baba-251cdc1e14b2\") " pod="hostpath-provisioner/csi-hostpathplugin-znwgl" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.416983 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/325572f4-db9f-40a9-bd48-df93e3ec42ed-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-ngjm2\" (UID: \"325572f4-db9f-40a9-bd48-df93e3ec42ed\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-ngjm2" Sep 30 12:22:48 crc kubenswrapper[5002]: E0930 12:22:48.417159 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 12:22:48.917148951 +0000 UTC m=+143.166831097 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zfcl9" (UID: "048223bb-4ff9-41e0-9b78-457a720ff399") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.417290 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/048223bb-4ff9-41e0-9b78-457a720ff399-trusted-ca\") pod \"image-registry-697d97f7c8-zfcl9\" (UID: \"048223bb-4ff9-41e0-9b78-457a720ff399\") " pod="openshift-image-registry/image-registry-697d97f7c8-zfcl9" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.418018 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/325572f4-db9f-40a9-bd48-df93e3ec42ed-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-ngjm2\" (UID: \"325572f4-db9f-40a9-bd48-df93e3ec42ed\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-ngjm2" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.418066 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/87454cc1-5175-495b-9551-a19474d51e4a-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-wk84c\" (UID: \"87454cc1-5175-495b-9551-a19474d51e4a\") " pod="openshift-authentication/oauth-openshift-558db77b4-wk84c" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.418087 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/87454cc1-5175-495b-9551-a19474d51e4a-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-wk84c\" (UID: \"87454cc1-5175-495b-9551-a19474d51e4a\") " pod="openshift-authentication/oauth-openshift-558db77b4-wk84c" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.419044 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/325572f4-db9f-40a9-bd48-df93e3ec42ed-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-ngjm2\" (UID: 
\"325572f4-db9f-40a9-bd48-df93e3ec42ed\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-ngjm2" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.419110 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/87454cc1-5175-495b-9551-a19474d51e4a-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-wk84c\" (UID: \"87454cc1-5175-495b-9551-a19474d51e4a\") " pod="openshift-authentication/oauth-openshift-558db77b4-wk84c" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.419897 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/07d72c87-0749-4f19-b53f-7733b07cc149-srv-cert\") pod \"olm-operator-6b444d44fb-9kzms\" (UID: \"07d72c87-0749-4f19-b53f-7733b07cc149\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-9kzms" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.419935 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/2c6f790c-415f-46c9-baba-251cdc1e14b2-socket-dir\") pod \"csi-hostpathplugin-znwgl\" (UID: \"2c6f790c-415f-46c9-baba-251cdc1e14b2\") " pod="hostpath-provisioner/csi-hostpathplugin-znwgl" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.419957 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/87454cc1-5175-495b-9551-a19474d51e4a-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-wk84c\" (UID: \"87454cc1-5175-495b-9551-a19474d51e4a\") " pod="openshift-authentication/oauth-openshift-558db77b4-wk84c" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.420017 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/f7cb4999-7580-47a2-a3e6-764d49684eaf-etcd-client\") pod \"etcd-operator-b45778765-lv4mz\" (UID: \"f7cb4999-7580-47a2-a3e6-764d49684eaf\") " pod="openshift-etcd-operator/etcd-operator-b45778765-lv4mz" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.420035 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/07d72c87-0749-4f19-b53f-7733b07cc149-profile-collector-cert\") pod \"olm-operator-6b444d44fb-9kzms\" (UID: \"07d72c87-0749-4f19-b53f-7733b07cc149\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-9kzms" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.420051 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9t94x\" (UniqueName: \"kubernetes.io/projected/5f1cdbf0-65a0-4621-a4de-2337b8151bd3-kube-api-access-9t94x\") pod \"dns-operator-744455d44c-dgbs4\" (UID: \"5f1cdbf0-65a0-4621-a4de-2337b8151bd3\") " pod="openshift-dns-operator/dns-operator-744455d44c-dgbs4" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.420676 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/7151e9d9-4417-40bb-aac5-8f838065aa79-console-config\") pod \"console-f9d7485db-79sft\" (UID: \"7151e9d9-4417-40bb-aac5-8f838065aa79\") " pod="openshift-console/console-f9d7485db-79sft" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 
12:22:48.424380 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/87454cc1-5175-495b-9551-a19474d51e4a-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-wk84c\" (UID: \"87454cc1-5175-495b-9551-a19474d51e4a\") " pod="openshift-authentication/oauth-openshift-558db77b4-wk84c" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.424490 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1c0ed680-87c2-438b-aca7-b9fa1c19d414-config\") pod \"route-controller-manager-6576b87f9c-57t2m\" (UID: \"1c0ed680-87c2-438b-aca7-b9fa1c19d414\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-57t2m" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.424670 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q8dkm\" (UniqueName: \"kubernetes.io/projected/325572f4-db9f-40a9-bd48-df93e3ec42ed-kube-api-access-q8dkm\") pod \"cluster-image-registry-operator-dc59b4c8b-ngjm2\" (UID: \"325572f4-db9f-40a9-bd48-df93e3ec42ed\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-ngjm2" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.424769 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/0335170b-46ee-4cd3-aae5-694623192d49-config-volume\") pod \"collect-profiles-29320575-h29rv\" (UID: \"0335170b-46ee-4cd3-aae5-694623192d49\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29320575-h29rv" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.424953 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/454fbe1a-1b3c-46e8-855c-a2dd62ea2b9e-node-bootstrap-token\") pod \"machine-config-server-rps48\" (UID: \"454fbe1a-1b3c-46e8-855c-a2dd62ea2b9e\") " pod="openshift-machine-config-operator/machine-config-server-rps48" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.425027 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/87454cc1-5175-495b-9551-a19474d51e4a-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-wk84c\" (UID: \"87454cc1-5175-495b-9551-a19474d51e4a\") " pod="openshift-authentication/oauth-openshift-558db77b4-wk84c" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.425091 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/834344ed-4a8b-4a1e-be84-33622d21d0af-service-ca-bundle\") pod \"router-default-5444994796-6rw5f\" (UID: \"834344ed-4a8b-4a1e-be84-33622d21d0af\") " pod="openshift-ingress/router-default-5444994796-6rw5f" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.425764 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/87454cc1-5175-495b-9551-a19474d51e4a-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-wk84c\" (UID: \"87454cc1-5175-495b-9551-a19474d51e4a\") " pod="openshift-authentication/oauth-openshift-558db77b4-wk84c" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.425945 5002 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f7cb4999-7580-47a2-a3e6-764d49684eaf-config\") pod \"etcd-operator-b45778765-lv4mz\" (UID: \"f7cb4999-7580-47a2-a3e6-764d49684eaf\") " pod="openshift-etcd-operator/etcd-operator-b45778765-lv4mz" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.425978 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vgd9l\" (UniqueName: \"kubernetes.io/projected/048223bb-4ff9-41e0-9b78-457a720ff399-kube-api-access-vgd9l\") pod \"image-registry-697d97f7c8-zfcl9\" (UID: \"048223bb-4ff9-41e0-9b78-457a720ff399\") " pod="openshift-image-registry/image-registry-697d97f7c8-zfcl9" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.425995 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m2mv7\" (UniqueName: \"kubernetes.io/projected/ccd47538-6f91-4c6d-91b0-afccf0c83b20-kube-api-access-m2mv7\") pod \"machine-api-operator-5694c8668f-fp74c\" (UID: \"ccd47538-6f91-4c6d-91b0-afccf0c83b20\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-fp74c" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.426029 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k7rp6\" (UniqueName: \"kubernetes.io/projected/54790eef-93bc-4d88-bcc5-3a2f6e1c1fa5-kube-api-access-k7rp6\") pod \"downloads-7954f5f757-tdxkp\" (UID: \"54790eef-93bc-4d88-bcc5-3a2f6e1c1fa5\") " pod="openshift-console/downloads-7954f5f757-tdxkp" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.426047 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/f7cb4999-7580-47a2-a3e6-764d49684eaf-etcd-service-ca\") pod \"etcd-operator-b45778765-lv4mz\" (UID: \"f7cb4999-7580-47a2-a3e6-764d49684eaf\") " pod="openshift-etcd-operator/etcd-operator-b45778765-lv4mz" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.426580 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f7cb4999-7580-47a2-a3e6-764d49684eaf-config\") pod \"etcd-operator-b45778765-lv4mz\" (UID: \"f7cb4999-7580-47a2-a3e6-764d49684eaf\") " pod="openshift-etcd-operator/etcd-operator-b45778765-lv4mz" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.426624 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5b1374f1-9722-4483-b976-775bdd8cb65a-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-k6j67\" (UID: \"5b1374f1-9722-4483-b976-775bdd8cb65a\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-k6j67" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.426672 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/0335170b-46ee-4cd3-aae5-694623192d49-secret-volume\") pod \"collect-profiles-29320575-h29rv\" (UID: \"0335170b-46ee-4cd3-aae5-694623192d49\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29320575-h29rv" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.426719 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-82tzc\" (UniqueName: \"kubernetes.io/projected/b91e38cc-c852-407d-8efd-227f0bfaa5fb-kube-api-access-82tzc\") pod 
\"cluster-samples-operator-665b6dd947-n5jzk\" (UID: \"b91e38cc-c852-407d-8efd-227f0bfaa5fb\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-n5jzk" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.426737 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/acdc25a7-3353-430f-b856-22a1259025ee-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-hc84k\" (UID: \"acdc25a7-3353-430f-b856-22a1259025ee\") " pod="openshift-marketplace/marketplace-operator-79b997595-hc84k" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.426781 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e536d056-a018-4d58-a85a-24d70c2116fc-config\") pod \"authentication-operator-69f744f599-xnv2t\" (UID: \"e536d056-a018-4d58-a85a-24d70c2116fc\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-xnv2t" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.426806 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j6ld9\" (UniqueName: \"kubernetes.io/projected/07d72c87-0749-4f19-b53f-7733b07cc149-kube-api-access-j6ld9\") pod \"olm-operator-6b444d44fb-9kzms\" (UID: \"07d72c87-0749-4f19-b53f-7733b07cc149\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-9kzms" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.426937 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tjxqj\" (UniqueName: \"kubernetes.io/projected/fef68bfa-20e4-4a83-8642-be28f0f0b31a-kube-api-access-tjxqj\") pod \"machine-config-operator-74547568cd-2lfhz\" (UID: \"fef68bfa-20e4-4a83-8642-be28f0f0b31a\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-2lfhz" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.426975 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/2c6f790c-415f-46c9-baba-251cdc1e14b2-registration-dir\") pod \"csi-hostpathplugin-znwgl\" (UID: \"2c6f790c-415f-46c9-baba-251cdc1e14b2\") " pod="hostpath-provisioner/csi-hostpathplugin-znwgl" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.427002 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/048223bb-4ff9-41e0-9b78-457a720ff399-ca-trust-extracted\") pod \"image-registry-697d97f7c8-zfcl9\" (UID: \"048223bb-4ff9-41e0-9b78-457a720ff399\") " pod="openshift-image-registry/image-registry-697d97f7c8-zfcl9" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.427018 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/f27fcf7d-17db-407a-b6ee-e34779332edf-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-2pmxc\" (UID: \"f27fcf7d-17db-407a-b6ee-e34779332edf\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-2pmxc" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.427035 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-service-ca\" (UniqueName: 
\"kubernetes.io/configmap/f7cb4999-7580-47a2-a3e6-764d49684eaf-etcd-service-ca\") pod \"etcd-operator-b45778765-lv4mz\" (UID: \"f7cb4999-7580-47a2-a3e6-764d49684eaf\") " pod="openshift-etcd-operator/etcd-operator-b45778765-lv4mz" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.427059 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/1fa6d0bc-ffc5-4974-bb50-123dd96187bd-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-djckq\" (UID: \"1fa6d0bc-ffc5-4974-bb50-123dd96187bd\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-djckq" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.427111 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/796cc797-549b-40ee-8bf9-66a8d9b54703-profile-collector-cert\") pod \"catalog-operator-68c6474976-6tqs2\" (UID: \"796cc797-549b-40ee-8bf9-66a8d9b54703\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-6tqs2" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.427134 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8t6nq\" (UniqueName: \"kubernetes.io/projected/cae8a060-bc60-476a-9bee-11637282a23f-kube-api-access-8t6nq\") pod \"dns-default-h52ns\" (UID: \"cae8a060-bc60-476a-9bee-11637282a23f\") " pod="openshift-dns/dns-default-h52ns" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.427279 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/e536d056-a018-4d58-a85a-24d70c2116fc-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-xnv2t\" (UID: \"e536d056-a018-4d58-a85a-24d70c2116fc\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-xnv2t" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.427303 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qc7nq\" (UniqueName: \"kubernetes.io/projected/7151e9d9-4417-40bb-aac5-8f838065aa79-kube-api-access-qc7nq\") pod \"console-f9d7485db-79sft\" (UID: \"7151e9d9-4417-40bb-aac5-8f838065aa79\") " pod="openshift-console/console-f9d7485db-79sft" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.427379 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e536d056-a018-4d58-a85a-24d70c2116fc-config\") pod \"authentication-operator-69f744f599-xnv2t\" (UID: \"e536d056-a018-4d58-a85a-24d70c2116fc\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-xnv2t" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.427417 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/048223bb-4ff9-41e0-9b78-457a720ff399-ca-trust-extracted\") pod \"image-registry-697d97f7c8-zfcl9\" (UID: \"048223bb-4ff9-41e0-9b78-457a720ff399\") " pod="openshift-image-registry/image-registry-697d97f7c8-zfcl9" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.427460 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/87454cc1-5175-495b-9551-a19474d51e4a-audit-policies\") pod \"oauth-openshift-558db77b4-wk84c\" (UID: 
\"87454cc1-5175-495b-9551-a19474d51e4a\") " pod="openshift-authentication/oauth-openshift-558db77b4-wk84c" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.427531 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/ccd47538-6f91-4c6d-91b0-afccf0c83b20-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-fp74c\" (UID: \"ccd47538-6f91-4c6d-91b0-afccf0c83b20\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-fp74c" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.427552 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/1fa6d0bc-ffc5-4974-bb50-123dd96187bd-proxy-tls\") pod \"machine-config-controller-84d6567774-djckq\" (UID: \"1fa6d0bc-ffc5-4974-bb50-123dd96187bd\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-djckq" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.427678 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e10da470-8dec-47bc-8d4b-0cbc5362aea0-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-dnmjr\" (UID: \"e10da470-8dec-47bc-8d4b-0cbc5362aea0\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-dnmjr" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.427824 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/846fc575-9b63-46b3-bd9d-dfc8ab0bf7a2-serving-cert\") pod \"service-ca-operator-777779d784-wtzqd\" (UID: \"846fc575-9b63-46b3-bd9d-dfc8ab0bf7a2\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-wtzqd" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.427874 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/87454cc1-5175-495b-9551-a19474d51e4a-audit-dir\") pod \"oauth-openshift-558db77b4-wk84c\" (UID: \"87454cc1-5175-495b-9551-a19474d51e4a\") " pod="openshift-authentication/oauth-openshift-558db77b4-wk84c" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.427905 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tw4gl\" (UniqueName: \"kubernetes.io/projected/454fbe1a-1b3c-46e8-855c-a2dd62ea2b9e-kube-api-access-tw4gl\") pod \"machine-config-server-rps48\" (UID: \"454fbe1a-1b3c-46e8-855c-a2dd62ea2b9e\") " pod="openshift-machine-config-operator/machine-config-server-rps48" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.427924 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/87454cc1-5175-495b-9551-a19474d51e4a-audit-dir\") pod \"oauth-openshift-558db77b4-wk84c\" (UID: \"87454cc1-5175-495b-9551-a19474d51e4a\") " pod="openshift-authentication/oauth-openshift-558db77b4-wk84c" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.427929 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/87454cc1-5175-495b-9551-a19474d51e4a-audit-policies\") pod \"oauth-openshift-558db77b4-wk84c\" (UID: \"87454cc1-5175-495b-9551-a19474d51e4a\") " pod="openshift-authentication/oauth-openshift-558db77b4-wk84c" Sep 30 
12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.427959 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/87454cc1-5175-495b-9551-a19474d51e4a-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-wk84c\" (UID: \"87454cc1-5175-495b-9551-a19474d51e4a\") " pod="openshift-authentication/oauth-openshift-558db77b4-wk84c" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.428245 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/5b1374f1-9722-4483-b976-775bdd8cb65a-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-k6j67\" (UID: \"5b1374f1-9722-4483-b976-775bdd8cb65a\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-k6j67" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.428274 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/454fbe1a-1b3c-46e8-855c-a2dd62ea2b9e-certs\") pod \"machine-config-server-rps48\" (UID: \"454fbe1a-1b3c-46e8-855c-a2dd62ea2b9e\") " pod="openshift-machine-config-operator/machine-config-server-rps48" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.428289 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/dfee3432-722f-454a-bd40-36fcfe1fb935-tmpfs\") pod \"packageserver-d55dfcdfc-sczzw\" (UID: \"dfee3432-722f-454a-bd40-36fcfe1fb935\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-sczzw" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.428313 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5b1374f1-9722-4483-b976-775bdd8cb65a-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-k6j67\" (UID: \"5b1374f1-9722-4483-b976-775bdd8cb65a\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-k6j67" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.428339 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e10da470-8dec-47bc-8d4b-0cbc5362aea0-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-dnmjr\" (UID: \"e10da470-8dec-47bc-8d4b-0cbc5362aea0\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-dnmjr" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.428371 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bae0075d-69e4-48a2-91f2-a6e2df1529f0-config\") pod \"kube-apiserver-operator-766d6c64bb-v6kxk\" (UID: \"bae0075d-69e4-48a2-91f2-a6e2df1529f0\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-v6kxk" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.428388 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/e536d056-a018-4d58-a85a-24d70c2116fc-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-xnv2t\" (UID: \"e536d056-a018-4d58-a85a-24d70c2116fc\") " 
pod="openshift-authentication-operator/authentication-operator-69f744f599-xnv2t" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.428389 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bae0075d-69e4-48a2-91f2-a6e2df1529f0-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-v6kxk\" (UID: \"bae0075d-69e4-48a2-91f2-a6e2df1529f0\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-v6kxk" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.428419 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/dfee3432-722f-454a-bd40-36fcfe1fb935-webhook-cert\") pod \"packageserver-d55dfcdfc-sczzw\" (UID: \"dfee3432-722f-454a-bd40-36fcfe1fb935\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-sczzw" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.428436 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wcdgq\" (UniqueName: \"kubernetes.io/projected/acdc25a7-3353-430f-b856-22a1259025ee-kube-api-access-wcdgq\") pod \"marketplace-operator-79b997595-hc84k\" (UID: \"acdc25a7-3353-430f-b856-22a1259025ee\") " pod="openshift-marketplace/marketplace-operator-79b997595-hc84k" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.428472 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fef68bfa-20e4-4a83-8642-be28f0f0b31a-proxy-tls\") pod \"machine-config-operator-74547568cd-2lfhz\" (UID: \"fef68bfa-20e4-4a83-8642-be28f0f0b31a\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-2lfhz" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.428502 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/b91e38cc-c852-407d-8efd-227f0bfaa5fb-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-n5jzk\" (UID: \"b91e38cc-c852-407d-8efd-227f0bfaa5fb\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-n5jzk" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.428520 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/7151e9d9-4417-40bb-aac5-8f838065aa79-service-ca\") pod \"console-f9d7485db-79sft\" (UID: \"7151e9d9-4417-40bb-aac5-8f838065aa79\") " pod="openshift-console/console-f9d7485db-79sft" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.428628 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1c0ed680-87c2-438b-aca7-b9fa1c19d414-serving-cert\") pod \"route-controller-manager-6576b87f9c-57t2m\" (UID: \"1c0ed680-87c2-438b-aca7-b9fa1c19d414\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-57t2m" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.428698 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/846fc575-9b63-46b3-bd9d-dfc8ab0bf7a2-config\") pod \"service-ca-operator-777779d784-wtzqd\" (UID: \"846fc575-9b63-46b3-bd9d-dfc8ab0bf7a2\") " 
pod="openshift-service-ca-operator/service-ca-operator-777779d784-wtzqd" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.428724 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/c53fef56-513a-439c-a05a-cda26f9e3855-bound-sa-token\") pod \"ingress-operator-5b745b69d9-wqhm8\" (UID: \"c53fef56-513a-439c-a05a-cda26f9e3855\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-wqhm8" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.428774 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p26xr\" (UniqueName: \"kubernetes.io/projected/f7cb4999-7580-47a2-a3e6-764d49684eaf-kube-api-access-p26xr\") pod \"etcd-operator-b45778765-lv4mz\" (UID: \"f7cb4999-7580-47a2-a3e6-764d49684eaf\") " pod="openshift-etcd-operator/etcd-operator-b45778765-lv4mz" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.428921 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/1c0ed680-87c2-438b-aca7-b9fa1c19d414-client-ca\") pod \"route-controller-manager-6576b87f9c-57t2m\" (UID: \"1c0ed680-87c2-438b-aca7-b9fa1c19d414\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-57t2m" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.428954 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/acdc25a7-3353-430f-b856-22a1259025ee-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-hc84k\" (UID: \"acdc25a7-3353-430f-b856-22a1259025ee\") " pod="openshift-marketplace/marketplace-operator-79b997595-hc84k" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.428970 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/7151e9d9-4417-40bb-aac5-8f838065aa79-oauth-serving-cert\") pod \"console-f9d7485db-79sft\" (UID: \"7151e9d9-4417-40bb-aac5-8f838065aa79\") " pod="openshift-console/console-f9d7485db-79sft" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.429018 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fef68bfa-20e4-4a83-8642-be28f0f0b31a-auth-proxy-config\") pod \"machine-config-operator-74547568cd-2lfhz\" (UID: \"fef68bfa-20e4-4a83-8642-be28f0f0b31a\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-2lfhz" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.429036 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/834344ed-4a8b-4a1e-be84-33622d21d0af-metrics-certs\") pod \"router-default-5444994796-6rw5f\" (UID: \"834344ed-4a8b-4a1e-be84-33622d21d0af\") " pod="openshift-ingress/router-default-5444994796-6rw5f" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.429057 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/14158f4f-292f-4693-9ed2-c7fa97452ecb-serving-cert\") pod \"openshift-config-operator-7777fb866f-hns42\" (UID: \"14158f4f-292f-4693-9ed2-c7fa97452ecb\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-hns42" Sep 30 12:22:48 crc kubenswrapper[5002]: 
I0930 12:22:48.429114 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5d8qh\" (UniqueName: \"kubernetes.io/projected/14158f4f-292f-4693-9ed2-c7fa97452ecb-kube-api-access-5d8qh\") pod \"openshift-config-operator-7777fb866f-hns42\" (UID: \"14158f4f-292f-4693-9ed2-c7fa97452ecb\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-hns42" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.429131 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/7151e9d9-4417-40bb-aac5-8f838065aa79-console-oauth-config\") pod \"console-f9d7485db-79sft\" (UID: \"7151e9d9-4417-40bb-aac5-8f838065aa79\") " pod="openshift-console/console-f9d7485db-79sft" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.429151 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xt4r8\" (UniqueName: \"kubernetes.io/projected/e10da470-8dec-47bc-8d4b-0cbc5362aea0-kube-api-access-xt4r8\") pod \"kube-storage-version-migrator-operator-b67b599dd-dnmjr\" (UID: \"e10da470-8dec-47bc-8d4b-0cbc5362aea0\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-dnmjr" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.429204 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/87454cc1-5175-495b-9551-a19474d51e4a-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-wk84c\" (UID: \"87454cc1-5175-495b-9551-a19474d51e4a\") " pod="openshift-authentication/oauth-openshift-558db77b4-wk84c" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.429739 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/7151e9d9-4417-40bb-aac5-8f838065aa79-service-ca\") pod \"console-f9d7485db-79sft\" (UID: \"7151e9d9-4417-40bb-aac5-8f838065aa79\") " pod="openshift-console/console-f9d7485db-79sft" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.430292 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/1c0ed680-87c2-438b-aca7-b9fa1c19d414-client-ca\") pod \"route-controller-manager-6576b87f9c-57t2m\" (UID: \"1c0ed680-87c2-438b-aca7-b9fa1c19d414\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-57t2m" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.430480 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/7151e9d9-4417-40bb-aac5-8f838065aa79-oauth-serving-cert\") pod \"console-f9d7485db-79sft\" (UID: \"7151e9d9-4417-40bb-aac5-8f838065aa79\") " pod="openshift-console/console-f9d7485db-79sft" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.431810 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/fea8ba5e-73ec-4bce-99f6-72b720813d96-cert\") pod \"ingress-canary-2fqm7\" (UID: \"fea8ba5e-73ec-4bce-99f6-72b720813d96\") " pod="openshift-ingress-canary/ingress-canary-2fqm7" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.431860 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"available-featuregates\" (UniqueName: 
\"kubernetes.io/empty-dir/14158f4f-292f-4693-9ed2-c7fa97452ecb-available-featuregates\") pod \"openshift-config-operator-7777fb866f-hns42\" (UID: \"14158f4f-292f-4693-9ed2-c7fa97452ecb\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-hns42" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.431905 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/7151e9d9-4417-40bb-aac5-8f838065aa79-console-serving-cert\") pod \"console-f9d7485db-79sft\" (UID: \"7151e9d9-4417-40bb-aac5-8f838065aa79\") " pod="openshift-console/console-f9d7485db-79sft" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.431924 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7rsrj\" (UniqueName: \"kubernetes.io/projected/f27fcf7d-17db-407a-b6ee-e34779332edf-kube-api-access-7rsrj\") pod \"control-plane-machine-set-operator-78cbb6b69f-2pmxc\" (UID: \"f27fcf7d-17db-407a-b6ee-e34779332edf\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-2pmxc" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.432146 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/14158f4f-292f-4693-9ed2-c7fa97452ecb-available-featuregates\") pod \"openshift-config-operator-7777fb866f-hns42\" (UID: \"14158f4f-292f-4693-9ed2-c7fa97452ecb\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-hns42" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.432180 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/796cc797-549b-40ee-8bf9-66a8d9b54703-srv-cert\") pod \"catalog-operator-68c6474976-6tqs2\" (UID: \"796cc797-549b-40ee-8bf9-66a8d9b54703\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-6tqs2" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.432208 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f7cb4999-7580-47a2-a3e6-764d49684eaf-serving-cert\") pod \"etcd-operator-b45778765-lv4mz\" (UID: \"f7cb4999-7580-47a2-a3e6-764d49684eaf\") " pod="openshift-etcd-operator/etcd-operator-b45778765-lv4mz" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.432254 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/0eff3533-ad12-4c4b-bd1e-6a92d09d2835-signing-cabundle\") pod \"service-ca-9c57cc56f-4nrzw\" (UID: \"0eff3533-ad12-4c4b-bd1e-6a92d09d2835\") " pod="openshift-service-ca/service-ca-9c57cc56f-4nrzw" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.432278 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e536d056-a018-4d58-a85a-24d70c2116fc-serving-cert\") pod \"authentication-operator-69f744f599-xnv2t\" (UID: \"e536d056-a018-4d58-a85a-24d70c2116fc\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-xnv2t" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.433254 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/f7cb4999-7580-47a2-a3e6-764d49684eaf-etcd-client\") pod \"etcd-operator-b45778765-lv4mz\" (UID: 
\"f7cb4999-7580-47a2-a3e6-764d49684eaf\") " pod="openshift-etcd-operator/etcd-operator-b45778765-lv4mz" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.433716 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bwmsq\" (UniqueName: \"kubernetes.io/projected/e536d056-a018-4d58-a85a-24d70c2116fc-kube-api-access-bwmsq\") pod \"authentication-operator-69f744f599-xnv2t\" (UID: \"e536d056-a018-4d58-a85a-24d70c2116fc\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-xnv2t" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.433751 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vk627\" (UniqueName: \"kubernetes.io/projected/0eff3533-ad12-4c4b-bd1e-6a92d09d2835-kube-api-access-vk627\") pod \"service-ca-9c57cc56f-4nrzw\" (UID: \"0eff3533-ad12-4c4b-bd1e-6a92d09d2835\") " pod="openshift-service-ca/service-ca-9c57cc56f-4nrzw" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.433894 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dwrvp\" (UniqueName: \"kubernetes.io/projected/0335170b-46ee-4cd3-aae5-694623192d49-kube-api-access-dwrvp\") pod \"collect-profiles-29320575-h29rv\" (UID: \"0335170b-46ee-4cd3-aae5-694623192d49\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29320575-h29rv" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.433913 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p89l8\" (UniqueName: \"kubernetes.io/projected/834344ed-4a8b-4a1e-be84-33622d21d0af-kube-api-access-p89l8\") pod \"router-default-5444994796-6rw5f\" (UID: \"834344ed-4a8b-4a1e-be84-33622d21d0af\") " pod="openshift-ingress/router-default-5444994796-6rw5f" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.433938 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/e536d056-a018-4d58-a85a-24d70c2116fc-service-ca-bundle\") pod \"authentication-operator-69f744f599-xnv2t\" (UID: \"e536d056-a018-4d58-a85a-24d70c2116fc\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-xnv2t" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.433955 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mvvkt\" (UniqueName: \"kubernetes.io/projected/fea8ba5e-73ec-4bce-99f6-72b720813d96-kube-api-access-mvvkt\") pod \"ingress-canary-2fqm7\" (UID: \"fea8ba5e-73ec-4bce-99f6-72b720813d96\") " pod="openshift-ingress-canary/ingress-canary-2fqm7" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.434654 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/e536d056-a018-4d58-a85a-24d70c2116fc-service-ca-bundle\") pod \"authentication-operator-69f744f599-xnv2t\" (UID: \"e536d056-a018-4d58-a85a-24d70c2116fc\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-xnv2t" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.445329 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/14158f4f-292f-4693-9ed2-c7fa97452ecb-serving-cert\") pod \"openshift-config-operator-7777fb866f-hns42\" (UID: \"14158f4f-292f-4693-9ed2-c7fa97452ecb\") " 
pod="openshift-config-operator/openshift-config-operator-7777fb866f-hns42" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.449465 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/ccd47538-6f91-4c6d-91b0-afccf0c83b20-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-fp74c\" (UID: \"ccd47538-6f91-4c6d-91b0-afccf0c83b20\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-fp74c" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.451361 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/87454cc1-5175-495b-9551-a19474d51e4a-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-wk84c\" (UID: \"87454cc1-5175-495b-9551-a19474d51e4a\") " pod="openshift-authentication/oauth-openshift-558db77b4-wk84c" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.453331 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/87454cc1-5175-495b-9551-a19474d51e4a-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-wk84c\" (UID: \"87454cc1-5175-495b-9551-a19474d51e4a\") " pod="openshift-authentication/oauth-openshift-558db77b4-wk84c" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.453393 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1c0ed680-87c2-438b-aca7-b9fa1c19d414-serving-cert\") pod \"route-controller-manager-6576b87f9c-57t2m\" (UID: \"1c0ed680-87c2-438b-aca7-b9fa1c19d414\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-57t2m" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.453748 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/87454cc1-5175-495b-9551-a19474d51e4a-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-wk84c\" (UID: \"87454cc1-5175-495b-9551-a19474d51e4a\") " pod="openshift-authentication/oauth-openshift-558db77b4-wk84c" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.453891 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/87454cc1-5175-495b-9551-a19474d51e4a-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-wk84c\" (UID: \"87454cc1-5175-495b-9551-a19474d51e4a\") " pod="openshift-authentication/oauth-openshift-558db77b4-wk84c" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.454027 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/7151e9d9-4417-40bb-aac5-8f838065aa79-console-oauth-config\") pod \"console-f9d7485db-79sft\" (UID: \"7151e9d9-4417-40bb-aac5-8f838065aa79\") " pod="openshift-console/console-f9d7485db-79sft" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.454474 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/048223bb-4ff9-41e0-9b78-457a720ff399-installation-pull-secrets\") pod \"image-registry-697d97f7c8-zfcl9\" (UID: \"048223bb-4ff9-41e0-9b78-457a720ff399\") " pod="openshift-image-registry/image-registry-697d97f7c8-zfcl9" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.454514 5002 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/87454cc1-5175-495b-9551-a19474d51e4a-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-wk84c\" (UID: \"87454cc1-5175-495b-9551-a19474d51e4a\") " pod="openshift-authentication/oauth-openshift-558db77b4-wk84c" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.454883 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/048223bb-4ff9-41e0-9b78-457a720ff399-registry-tls\") pod \"image-registry-697d97f7c8-zfcl9\" (UID: \"048223bb-4ff9-41e0-9b78-457a720ff399\") " pod="openshift-image-registry/image-registry-697d97f7c8-zfcl9" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.456395 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/87454cc1-5175-495b-9551-a19474d51e4a-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-wk84c\" (UID: \"87454cc1-5175-495b-9551-a19474d51e4a\") " pod="openshift-authentication/oauth-openshift-558db77b4-wk84c" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.460174 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/7151e9d9-4417-40bb-aac5-8f838065aa79-console-serving-cert\") pod \"console-f9d7485db-79sft\" (UID: \"7151e9d9-4417-40bb-aac5-8f838065aa79\") " pod="openshift-console/console-f9d7485db-79sft" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.460566 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f7cb4999-7580-47a2-a3e6-764d49684eaf-serving-cert\") pod \"etcd-operator-b45778765-lv4mz\" (UID: \"f7cb4999-7580-47a2-a3e6-764d49684eaf\") " pod="openshift-etcd-operator/etcd-operator-b45778765-lv4mz" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.461967 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/87454cc1-5175-495b-9551-a19474d51e4a-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-wk84c\" (UID: \"87454cc1-5175-495b-9551-a19474d51e4a\") " pod="openshift-authentication/oauth-openshift-558db77b4-wk84c" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.462250 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e536d056-a018-4d58-a85a-24d70c2116fc-serving-cert\") pod \"authentication-operator-69f744f599-xnv2t\" (UID: \"e536d056-a018-4d58-a85a-24d70c2116fc\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-xnv2t" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.476947 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/325572f4-db9f-40a9-bd48-df93e3ec42ed-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-ngjm2\" (UID: \"325572f4-db9f-40a9-bd48-df93e3ec42ed\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-ngjm2" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.486349 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: 
\"kubernetes.io/projected/048223bb-4ff9-41e0-9b78-457a720ff399-bound-sa-token\") pod \"image-registry-697d97f7c8-zfcl9\" (UID: \"048223bb-4ff9-41e0-9b78-457a720ff399\") " pod="openshift-image-registry/image-registry-697d97f7c8-zfcl9" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.512316 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xtlqs\" (UniqueName: \"kubernetes.io/projected/87454cc1-5175-495b-9551-a19474d51e4a-kube-api-access-xtlqs\") pod \"oauth-openshift-558db77b4-wk84c\" (UID: \"87454cc1-5175-495b-9551-a19474d51e4a\") " pod="openshift-authentication/oauth-openshift-558db77b4-wk84c" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.513932 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hpnvt\" (UniqueName: \"kubernetes.io/projected/1c0ed680-87c2-438b-aca7-b9fa1c19d414-kube-api-access-hpnvt\") pod \"route-controller-manager-6576b87f9c-57t2m\" (UID: \"1c0ed680-87c2-438b-aca7-b9fa1c19d414\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-57t2m" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.540801 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.540930 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/1fa6d0bc-ffc5-4974-bb50-123dd96187bd-proxy-tls\") pod \"machine-config-controller-84d6567774-djckq\" (UID: \"1fa6d0bc-ffc5-4974-bb50-123dd96187bd\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-djckq" Sep 30 12:22:48 crc kubenswrapper[5002]: E0930 12:22:48.541803 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 12:22:49.041775017 +0000 UTC m=+143.291457223 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.542801 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/846fc575-9b63-46b3-bd9d-dfc8ab0bf7a2-serving-cert\") pod \"service-ca-operator-777779d784-wtzqd\" (UID: \"846fc575-9b63-46b3-bd9d-dfc8ab0bf7a2\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-wtzqd" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.542825 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e10da470-8dec-47bc-8d4b-0cbc5362aea0-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-dnmjr\" (UID: \"e10da470-8dec-47bc-8d4b-0cbc5362aea0\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-dnmjr" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.542845 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tw4gl\" (UniqueName: \"kubernetes.io/projected/454fbe1a-1b3c-46e8-855c-a2dd62ea2b9e-kube-api-access-tw4gl\") pod \"machine-config-server-rps48\" (UID: \"454fbe1a-1b3c-46e8-855c-a2dd62ea2b9e\") " pod="openshift-machine-config-operator/machine-config-server-rps48" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.542862 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/5b1374f1-9722-4483-b976-775bdd8cb65a-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-k6j67\" (UID: \"5b1374f1-9722-4483-b976-775bdd8cb65a\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-k6j67" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.542880 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/454fbe1a-1b3c-46e8-855c-a2dd62ea2b9e-certs\") pod \"machine-config-server-rps48\" (UID: \"454fbe1a-1b3c-46e8-855c-a2dd62ea2b9e\") " pod="openshift-machine-config-operator/machine-config-server-rps48" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.542896 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/dfee3432-722f-454a-bd40-36fcfe1fb935-tmpfs\") pod \"packageserver-d55dfcdfc-sczzw\" (UID: \"dfee3432-722f-454a-bd40-36fcfe1fb935\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-sczzw" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.542914 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5b1374f1-9722-4483-b976-775bdd8cb65a-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-k6j67\" (UID: \"5b1374f1-9722-4483-b976-775bdd8cb65a\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-k6j67" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.542929 5002 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e10da470-8dec-47bc-8d4b-0cbc5362aea0-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-dnmjr\" (UID: \"e10da470-8dec-47bc-8d4b-0cbc5362aea0\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-dnmjr" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.542945 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bae0075d-69e4-48a2-91f2-a6e2df1529f0-config\") pod \"kube-apiserver-operator-766d6c64bb-v6kxk\" (UID: \"bae0075d-69e4-48a2-91f2-a6e2df1529f0\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-v6kxk" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.542971 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fef68bfa-20e4-4a83-8642-be28f0f0b31a-proxy-tls\") pod \"machine-config-operator-74547568cd-2lfhz\" (UID: \"fef68bfa-20e4-4a83-8642-be28f0f0b31a\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-2lfhz" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.542986 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/b91e38cc-c852-407d-8efd-227f0bfaa5fb-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-n5jzk\" (UID: \"b91e38cc-c852-407d-8efd-227f0bfaa5fb\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-n5jzk" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.543002 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bae0075d-69e4-48a2-91f2-a6e2df1529f0-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-v6kxk\" (UID: \"bae0075d-69e4-48a2-91f2-a6e2df1529f0\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-v6kxk" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.543016 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/dfee3432-722f-454a-bd40-36fcfe1fb935-webhook-cert\") pod \"packageserver-d55dfcdfc-sczzw\" (UID: \"dfee3432-722f-454a-bd40-36fcfe1fb935\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-sczzw" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.543031 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wcdgq\" (UniqueName: \"kubernetes.io/projected/acdc25a7-3353-430f-b856-22a1259025ee-kube-api-access-wcdgq\") pod \"marketplace-operator-79b997595-hc84k\" (UID: \"acdc25a7-3353-430f-b856-22a1259025ee\") " pod="openshift-marketplace/marketplace-operator-79b997595-hc84k" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.543048 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/846fc575-9b63-46b3-bd9d-dfc8ab0bf7a2-config\") pod \"service-ca-operator-777779d784-wtzqd\" (UID: \"846fc575-9b63-46b3-bd9d-dfc8ab0bf7a2\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-wtzqd" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.543062 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: 
\"kubernetes.io/projected/c53fef56-513a-439c-a05a-cda26f9e3855-bound-sa-token\") pod \"ingress-operator-5b745b69d9-wqhm8\" (UID: \"c53fef56-513a-439c-a05a-cda26f9e3855\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-wqhm8" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.543087 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/acdc25a7-3353-430f-b856-22a1259025ee-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-hc84k\" (UID: \"acdc25a7-3353-430f-b856-22a1259025ee\") " pod="openshift-marketplace/marketplace-operator-79b997595-hc84k" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.543111 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fef68bfa-20e4-4a83-8642-be28f0f0b31a-auth-proxy-config\") pod \"machine-config-operator-74547568cd-2lfhz\" (UID: \"fef68bfa-20e4-4a83-8642-be28f0f0b31a\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-2lfhz" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.543126 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/834344ed-4a8b-4a1e-be84-33622d21d0af-metrics-certs\") pod \"router-default-5444994796-6rw5f\" (UID: \"834344ed-4a8b-4a1e-be84-33622d21d0af\") " pod="openshift-ingress/router-default-5444994796-6rw5f" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.543147 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xt4r8\" (UniqueName: \"kubernetes.io/projected/e10da470-8dec-47bc-8d4b-0cbc5362aea0-kube-api-access-xt4r8\") pod \"kube-storage-version-migrator-operator-b67b599dd-dnmjr\" (UID: \"e10da470-8dec-47bc-8d4b-0cbc5362aea0\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-dnmjr" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.543165 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/fea8ba5e-73ec-4bce-99f6-72b720813d96-cert\") pod \"ingress-canary-2fqm7\" (UID: \"fea8ba5e-73ec-4bce-99f6-72b720813d96\") " pod="openshift-ingress-canary/ingress-canary-2fqm7" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.543186 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7rsrj\" (UniqueName: \"kubernetes.io/projected/f27fcf7d-17db-407a-b6ee-e34779332edf-kube-api-access-7rsrj\") pod \"control-plane-machine-set-operator-78cbb6b69f-2pmxc\" (UID: \"f27fcf7d-17db-407a-b6ee-e34779332edf\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-2pmxc" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.543203 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/796cc797-549b-40ee-8bf9-66a8d9b54703-srv-cert\") pod \"catalog-operator-68c6474976-6tqs2\" (UID: \"796cc797-549b-40ee-8bf9-66a8d9b54703\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-6tqs2" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.543218 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/0eff3533-ad12-4c4b-bd1e-6a92d09d2835-signing-cabundle\") pod \"service-ca-9c57cc56f-4nrzw\" (UID: 
\"0eff3533-ad12-4c4b-bd1e-6a92d09d2835\") " pod="openshift-service-ca/service-ca-9c57cc56f-4nrzw" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.543242 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vk627\" (UniqueName: \"kubernetes.io/projected/0eff3533-ad12-4c4b-bd1e-6a92d09d2835-kube-api-access-vk627\") pod \"service-ca-9c57cc56f-4nrzw\" (UID: \"0eff3533-ad12-4c4b-bd1e-6a92d09d2835\") " pod="openshift-service-ca/service-ca-9c57cc56f-4nrzw" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.543259 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dwrvp\" (UniqueName: \"kubernetes.io/projected/0335170b-46ee-4cd3-aae5-694623192d49-kube-api-access-dwrvp\") pod \"collect-profiles-29320575-h29rv\" (UID: \"0335170b-46ee-4cd3-aae5-694623192d49\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29320575-h29rv" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.543275 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mvvkt\" (UniqueName: \"kubernetes.io/projected/fea8ba5e-73ec-4bce-99f6-72b720813d96-kube-api-access-mvvkt\") pod \"ingress-canary-2fqm7\" (UID: \"fea8ba5e-73ec-4bce-99f6-72b720813d96\") " pod="openshift-ingress-canary/ingress-canary-2fqm7" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.543292 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p89l8\" (UniqueName: \"kubernetes.io/projected/834344ed-4a8b-4a1e-be84-33622d21d0af-kube-api-access-p89l8\") pod \"router-default-5444994796-6rw5f\" (UID: \"834344ed-4a8b-4a1e-be84-33622d21d0af\") " pod="openshift-ingress/router-default-5444994796-6rw5f" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.543309 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/364932b8-9721-41da-a1a3-7b9a1977da84-config\") pod \"kube-controller-manager-operator-78b949d7b-6w5w8\" (UID: \"364932b8-9721-41da-a1a3-7b9a1977da84\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-6w5w8" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.543324 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-79qt4\" (UniqueName: \"kubernetes.io/projected/2c6f790c-415f-46c9-baba-251cdc1e14b2-kube-api-access-79qt4\") pod \"csi-hostpathplugin-znwgl\" (UID: \"2c6f790c-415f-46c9-baba-251cdc1e14b2\") " pod="hostpath-provisioner/csi-hostpathplugin-znwgl" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.543341 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/834344ed-4a8b-4a1e-be84-33622d21d0af-default-certificate\") pod \"router-default-5444994796-6rw5f\" (UID: \"834344ed-4a8b-4a1e-be84-33622d21d0af\") " pod="openshift-ingress/router-default-5444994796-6rw5f" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.543354 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/834344ed-4a8b-4a1e-be84-33622d21d0af-stats-auth\") pod \"router-default-5444994796-6rw5f\" (UID: \"834344ed-4a8b-4a1e-be84-33622d21d0af\") " pod="openshift-ingress/router-default-5444994796-6rw5f" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.543369 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/2c6f790c-415f-46c9-baba-251cdc1e14b2-csi-data-dir\") pod \"csi-hostpathplugin-znwgl\" (UID: \"2c6f790c-415f-46c9-baba-251cdc1e14b2\") " pod="hostpath-provisioner/csi-hostpathplugin-znwgl" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.543390 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fmbjf\" (UniqueName: \"kubernetes.io/projected/846fc575-9b63-46b3-bd9d-dfc8ab0bf7a2-kube-api-access-fmbjf\") pod \"service-ca-operator-777779d784-wtzqd\" (UID: \"846fc575-9b63-46b3-bd9d-dfc8ab0bf7a2\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-wtzqd" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.543403 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/c53fef56-513a-439c-a05a-cda26f9e3855-trusted-ca\") pod \"ingress-operator-5b745b69d9-wqhm8\" (UID: \"c53fef56-513a-439c-a05a-cda26f9e3855\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-wqhm8" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.543419 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/364932b8-9721-41da-a1a3-7b9a1977da84-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-6w5w8\" (UID: \"364932b8-9721-41da-a1a3-7b9a1977da84\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-6w5w8" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.543436 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/dfee3432-722f-454a-bd40-36fcfe1fb935-apiservice-cert\") pod \"packageserver-d55dfcdfc-sczzw\" (UID: \"dfee3432-722f-454a-bd40-36fcfe1fb935\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-sczzw" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.543455 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/cae8a060-bc60-476a-9bee-11637282a23f-config-volume\") pod \"dns-default-h52ns\" (UID: \"cae8a060-bc60-476a-9bee-11637282a23f\") " pod="openshift-dns/dns-default-h52ns" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.543476 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/0eff3533-ad12-4c4b-bd1e-6a92d09d2835-signing-key\") pod \"service-ca-9c57cc56f-4nrzw\" (UID: \"0eff3533-ad12-4c4b-bd1e-6a92d09d2835\") " pod="openshift-service-ca/service-ca-9c57cc56f-4nrzw" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.543490 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jpjhv\" (UniqueName: \"kubernetes.io/projected/89d8a136-01a4-487f-8160-9a0cce584cec-kube-api-access-jpjhv\") pod \"migrator-59844c95c7-l9vch\" (UID: \"89d8a136-01a4-487f-8160-9a0cce584cec\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-l9vch" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.543518 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jvwsk\" (UniqueName: \"kubernetes.io/projected/dfee3432-722f-454a-bd40-36fcfe1fb935-kube-api-access-jvwsk\") pod \"packageserver-d55dfcdfc-sczzw\" (UID: \"dfee3432-722f-454a-bd40-36fcfe1fb935\") " 
pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-sczzw" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.543537 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ml679\" (UniqueName: \"kubernetes.io/projected/1fa6d0bc-ffc5-4974-bb50-123dd96187bd-kube-api-access-ml679\") pod \"machine-config-controller-84d6567774-djckq\" (UID: \"1fa6d0bc-ffc5-4974-bb50-123dd96187bd\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-djckq" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.543554 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/2c6f790c-415f-46c9-baba-251cdc1e14b2-mountpoint-dir\") pod \"csi-hostpathplugin-znwgl\" (UID: \"2c6f790c-415f-46c9-baba-251cdc1e14b2\") " pod="hostpath-provisioner/csi-hostpathplugin-znwgl" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.543574 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/5f1cdbf0-65a0-4621-a4de-2337b8151bd3-metrics-tls\") pod \"dns-operator-744455d44c-dgbs4\" (UID: \"5f1cdbf0-65a0-4621-a4de-2337b8151bd3\") " pod="openshift-dns-operator/dns-operator-744455d44c-dgbs4" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.543588 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/c53fef56-513a-439c-a05a-cda26f9e3855-metrics-tls\") pod \"ingress-operator-5b745b69d9-wqhm8\" (UID: \"c53fef56-513a-439c-a05a-cda26f9e3855\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-wqhm8" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.543602 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/364932b8-9721-41da-a1a3-7b9a1977da84-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-6w5w8\" (UID: \"364932b8-9721-41da-a1a3-7b9a1977da84\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-6w5w8" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.543618 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lc5bz\" (UniqueName: \"kubernetes.io/projected/f1a5276b-5811-4a19-8d31-a46a1c787d8a-kube-api-access-lc5bz\") pod \"package-server-manager-789f6589d5-2jxnc\" (UID: \"f1a5276b-5811-4a19-8d31-a46a1c787d8a\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-2jxnc" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.543632 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n459d\" (UniqueName: \"kubernetes.io/projected/796cc797-549b-40ee-8bf9-66a8d9b54703-kube-api-access-n459d\") pod \"catalog-operator-68c6474976-6tqs2\" (UID: \"796cc797-549b-40ee-8bf9-66a8d9b54703\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-6tqs2" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.543649 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/fef68bfa-20e4-4a83-8642-be28f0f0b31a-images\") pod \"machine-config-operator-74547568cd-2lfhz\" (UID: \"fef68bfa-20e4-4a83-8642-be28f0f0b31a\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-2lfhz" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 
12:22:48.543664 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2rjz\" (UniqueName: \"kubernetes.io/projected/b282f088-14fa-47c0-a8a8-1285a8a7c296-kube-api-access-s2rjz\") pod \"multus-admission-controller-857f4d67dd-88xml\" (UID: \"b282f088-14fa-47c0-a8a8-1285a8a7c296\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-88xml" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.543680 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/f1a5276b-5811-4a19-8d31-a46a1c787d8a-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-2jxnc\" (UID: \"f1a5276b-5811-4a19-8d31-a46a1c787d8a\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-2jxnc" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.543700 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zfcl9\" (UID: \"048223bb-4ff9-41e0-9b78-457a720ff399\") " pod="openshift-image-registry/image-registry-697d97f7c8-zfcl9" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.543715 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/cae8a060-bc60-476a-9bee-11637282a23f-metrics-tls\") pod \"dns-default-h52ns\" (UID: \"cae8a060-bc60-476a-9bee-11637282a23f\") " pod="openshift-dns/dns-default-h52ns" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.543732 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/b282f088-14fa-47c0-a8a8-1285a8a7c296-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-88xml\" (UID: \"b282f088-14fa-47c0-a8a8-1285a8a7c296\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-88xml" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.543749 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/bae0075d-69e4-48a2-91f2-a6e2df1529f0-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-v6kxk\" (UID: \"bae0075d-69e4-48a2-91f2-a6e2df1529f0\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-v6kxk" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.543765 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hns4t\" (UniqueName: \"kubernetes.io/projected/c53fef56-513a-439c-a05a-cda26f9e3855-kube-api-access-hns4t\") pod \"ingress-operator-5b745b69d9-wqhm8\" (UID: \"c53fef56-513a-439c-a05a-cda26f9e3855\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-wqhm8" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.543780 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/2c6f790c-415f-46c9-baba-251cdc1e14b2-plugins-dir\") pod \"csi-hostpathplugin-znwgl\" (UID: \"2c6f790c-415f-46c9-baba-251cdc1e14b2\") " pod="hostpath-provisioner/csi-hostpathplugin-znwgl" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.543798 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: 
\"kubernetes.io/secret/07d72c87-0749-4f19-b53f-7733b07cc149-srv-cert\") pod \"olm-operator-6b444d44fb-9kzms\" (UID: \"07d72c87-0749-4f19-b53f-7733b07cc149\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-9kzms" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.543812 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/2c6f790c-415f-46c9-baba-251cdc1e14b2-socket-dir\") pod \"csi-hostpathplugin-znwgl\" (UID: \"2c6f790c-415f-46c9-baba-251cdc1e14b2\") " pod="hostpath-provisioner/csi-hostpathplugin-znwgl" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.543829 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/07d72c87-0749-4f19-b53f-7733b07cc149-profile-collector-cert\") pod \"olm-operator-6b444d44fb-9kzms\" (UID: \"07d72c87-0749-4f19-b53f-7733b07cc149\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-9kzms" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.543844 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9t94x\" (UniqueName: \"kubernetes.io/projected/5f1cdbf0-65a0-4621-a4de-2337b8151bd3-kube-api-access-9t94x\") pod \"dns-operator-744455d44c-dgbs4\" (UID: \"5f1cdbf0-65a0-4621-a4de-2337b8151bd3\") " pod="openshift-dns-operator/dns-operator-744455d44c-dgbs4" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.543869 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/0335170b-46ee-4cd3-aae5-694623192d49-config-volume\") pod \"collect-profiles-29320575-h29rv\" (UID: \"0335170b-46ee-4cd3-aae5-694623192d49\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29320575-h29rv" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.543884 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/454fbe1a-1b3c-46e8-855c-a2dd62ea2b9e-node-bootstrap-token\") pod \"machine-config-server-rps48\" (UID: \"454fbe1a-1b3c-46e8-855c-a2dd62ea2b9e\") " pod="openshift-machine-config-operator/machine-config-server-rps48" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.543901 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/834344ed-4a8b-4a1e-be84-33622d21d0af-service-ca-bundle\") pod \"router-default-5444994796-6rw5f\" (UID: \"834344ed-4a8b-4a1e-be84-33622d21d0af\") " pod="openshift-ingress/router-default-5444994796-6rw5f" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.543935 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5b1374f1-9722-4483-b976-775bdd8cb65a-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-k6j67\" (UID: \"5b1374f1-9722-4483-b976-775bdd8cb65a\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-k6j67" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.543949 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/0335170b-46ee-4cd3-aae5-694623192d49-secret-volume\") pod \"collect-profiles-29320575-h29rv\" (UID: \"0335170b-46ee-4cd3-aae5-694623192d49\") " 
pod="openshift-operator-lifecycle-manager/collect-profiles-29320575-h29rv" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.543964 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-82tzc\" (UniqueName: \"kubernetes.io/projected/b91e38cc-c852-407d-8efd-227f0bfaa5fb-kube-api-access-82tzc\") pod \"cluster-samples-operator-665b6dd947-n5jzk\" (UID: \"b91e38cc-c852-407d-8efd-227f0bfaa5fb\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-n5jzk" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.543979 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/acdc25a7-3353-430f-b856-22a1259025ee-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-hc84k\" (UID: \"acdc25a7-3353-430f-b856-22a1259025ee\") " pod="openshift-marketplace/marketplace-operator-79b997595-hc84k" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.544002 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j6ld9\" (UniqueName: \"kubernetes.io/projected/07d72c87-0749-4f19-b53f-7733b07cc149-kube-api-access-j6ld9\") pod \"olm-operator-6b444d44fb-9kzms\" (UID: \"07d72c87-0749-4f19-b53f-7733b07cc149\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-9kzms" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.544025 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tjxqj\" (UniqueName: \"kubernetes.io/projected/fef68bfa-20e4-4a83-8642-be28f0f0b31a-kube-api-access-tjxqj\") pod \"machine-config-operator-74547568cd-2lfhz\" (UID: \"fef68bfa-20e4-4a83-8642-be28f0f0b31a\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-2lfhz" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.544040 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/2c6f790c-415f-46c9-baba-251cdc1e14b2-registration-dir\") pod \"csi-hostpathplugin-znwgl\" (UID: \"2c6f790c-415f-46c9-baba-251cdc1e14b2\") " pod="hostpath-provisioner/csi-hostpathplugin-znwgl" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.544055 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/f27fcf7d-17db-407a-b6ee-e34779332edf-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-2pmxc\" (UID: \"f27fcf7d-17db-407a-b6ee-e34779332edf\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-2pmxc" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.544072 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/1fa6d0bc-ffc5-4974-bb50-123dd96187bd-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-djckq\" (UID: \"1fa6d0bc-ffc5-4974-bb50-123dd96187bd\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-djckq" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.544088 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/796cc797-549b-40ee-8bf9-66a8d9b54703-profile-collector-cert\") pod \"catalog-operator-68c6474976-6tqs2\" (UID: 
\"796cc797-549b-40ee-8bf9-66a8d9b54703\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-6tqs2" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.544113 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8t6nq\" (UniqueName: \"kubernetes.io/projected/cae8a060-bc60-476a-9bee-11637282a23f-kube-api-access-8t6nq\") pod \"dns-default-h52ns\" (UID: \"cae8a060-bc60-476a-9bee-11637282a23f\") " pod="openshift-dns/dns-default-h52ns" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.545082 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/dfee3432-722f-454a-bd40-36fcfe1fb935-tmpfs\") pod \"packageserver-d55dfcdfc-sczzw\" (UID: \"dfee3432-722f-454a-bd40-36fcfe1fb935\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-sczzw" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.546603 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/1fa6d0bc-ffc5-4974-bb50-123dd96187bd-proxy-tls\") pod \"machine-config-controller-84d6567774-djckq\" (UID: \"1fa6d0bc-ffc5-4974-bb50-123dd96187bd\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-djckq" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.547430 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e10da470-8dec-47bc-8d4b-0cbc5362aea0-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-dnmjr\" (UID: \"e10da470-8dec-47bc-8d4b-0cbc5362aea0\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-dnmjr" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.547899 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5b1374f1-9722-4483-b976-775bdd8cb65a-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-k6j67\" (UID: \"5b1374f1-9722-4483-b976-775bdd8cb65a\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-k6j67" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.548029 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/2c6f790c-415f-46c9-baba-251cdc1e14b2-socket-dir\") pod \"csi-hostpathplugin-znwgl\" (UID: \"2c6f790c-415f-46c9-baba-251cdc1e14b2\") " pod="hostpath-provisioner/csi-hostpathplugin-znwgl" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.548482 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q8dkm\" (UniqueName: \"kubernetes.io/projected/325572f4-db9f-40a9-bd48-df93e3ec42ed-kube-api-access-q8dkm\") pod \"cluster-image-registry-operator-dc59b4c8b-ngjm2\" (UID: \"325572f4-db9f-40a9-bd48-df93e3ec42ed\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-ngjm2" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.548802 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"certs\" (UniqueName: \"kubernetes.io/secret/454fbe1a-1b3c-46e8-855c-a2dd62ea2b9e-certs\") pod \"machine-config-server-rps48\" (UID: \"454fbe1a-1b3c-46e8-855c-a2dd62ea2b9e\") " pod="openshift-machine-config-operator/machine-config-server-rps48" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.549220 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"images\" (UniqueName: \"kubernetes.io/configmap/fef68bfa-20e4-4a83-8642-be28f0f0b31a-images\") pod \"machine-config-operator-74547568cd-2lfhz\" (UID: \"fef68bfa-20e4-4a83-8642-be28f0f0b31a\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-2lfhz" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.549464 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/2c6f790c-415f-46c9-baba-251cdc1e14b2-mountpoint-dir\") pod \"csi-hostpathplugin-znwgl\" (UID: \"2c6f790c-415f-46c9-baba-251cdc1e14b2\") " pod="hostpath-provisioner/csi-hostpathplugin-znwgl" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.549518 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/0eff3533-ad12-4c4b-bd1e-6a92d09d2835-signing-cabundle\") pod \"service-ca-9c57cc56f-4nrzw\" (UID: \"0eff3533-ad12-4c4b-bd1e-6a92d09d2835\") " pod="openshift-service-ca/service-ca-9c57cc56f-4nrzw" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.550779 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/0335170b-46ee-4cd3-aae5-694623192d49-config-volume\") pod \"collect-profiles-29320575-h29rv\" (UID: \"0335170b-46ee-4cd3-aae5-694623192d49\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29320575-h29rv" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.551489 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/2c6f790c-415f-46c9-baba-251cdc1e14b2-plugins-dir\") pod \"csi-hostpathplugin-znwgl\" (UID: \"2c6f790c-415f-46c9-baba-251cdc1e14b2\") " pod="hostpath-provisioner/csi-hostpathplugin-znwgl" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.552841 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bae0075d-69e4-48a2-91f2-a6e2df1529f0-config\") pod \"kube-apiserver-operator-766d6c64bb-v6kxk\" (UID: \"bae0075d-69e4-48a2-91f2-a6e2df1529f0\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-v6kxk" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.554163 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/364932b8-9721-41da-a1a3-7b9a1977da84-config\") pod \"kube-controller-manager-operator-78b949d7b-6w5w8\" (UID: \"364932b8-9721-41da-a1a3-7b9a1977da84\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-6w5w8" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.554723 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5b1374f1-9722-4483-b976-775bdd8cb65a-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-k6j67\" (UID: \"5b1374f1-9722-4483-b976-775bdd8cb65a\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-k6j67" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.555284 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/834344ed-4a8b-4a1e-be84-33622d21d0af-service-ca-bundle\") pod \"router-default-5444994796-6rw5f\" (UID: \"834344ed-4a8b-4a1e-be84-33622d21d0af\") " pod="openshift-ingress/router-default-5444994796-6rw5f" Sep 30 12:22:48 crc 
kubenswrapper[5002]: I0930 12:22:48.555486 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/07d72c87-0749-4f19-b53f-7733b07cc149-profile-collector-cert\") pod \"olm-operator-6b444d44fb-9kzms\" (UID: \"07d72c87-0749-4f19-b53f-7733b07cc149\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-9kzms" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.557793 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e10da470-8dec-47bc-8d4b-0cbc5362aea0-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-dnmjr\" (UID: \"e10da470-8dec-47bc-8d4b-0cbc5362aea0\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-dnmjr" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.558922 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/07d72c87-0749-4f19-b53f-7733b07cc149-srv-cert\") pod \"olm-operator-6b444d44fb-9kzms\" (UID: \"07d72c87-0749-4f19-b53f-7733b07cc149\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-9kzms" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.559544 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/c53fef56-513a-439c-a05a-cda26f9e3855-metrics-tls\") pod \"ingress-operator-5b745b69d9-wqhm8\" (UID: \"c53fef56-513a-439c-a05a-cda26f9e3855\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-wqhm8" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.560474 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/acdc25a7-3353-430f-b856-22a1259025ee-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-hc84k\" (UID: \"acdc25a7-3353-430f-b856-22a1259025ee\") " pod="openshift-marketplace/marketplace-operator-79b997595-hc84k" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.560677 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/2c6f790c-415f-46c9-baba-251cdc1e14b2-registration-dir\") pod \"csi-hostpathplugin-znwgl\" (UID: \"2c6f790c-415f-46c9-baba-251cdc1e14b2\") " pod="hostpath-provisioner/csi-hostpathplugin-znwgl" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.561911 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m2mv7\" (UniqueName: \"kubernetes.io/projected/ccd47538-6f91-4c6d-91b0-afccf0c83b20-kube-api-access-m2mv7\") pod \"machine-api-operator-5694c8668f-fp74c\" (UID: \"ccd47538-6f91-4c6d-91b0-afccf0c83b20\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-fp74c" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.563063 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/acdc25a7-3353-430f-b856-22a1259025ee-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-hc84k\" (UID: \"acdc25a7-3353-430f-b856-22a1259025ee\") " pod="openshift-marketplace/marketplace-operator-79b997595-hc84k" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.563066 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-certificate\" (UniqueName: 
\"kubernetes.io/secret/834344ed-4a8b-4a1e-be84-33622d21d0af-default-certificate\") pod \"router-default-5444994796-6rw5f\" (UID: \"834344ed-4a8b-4a1e-be84-33622d21d0af\") " pod="openshift-ingress/router-default-5444994796-6rw5f" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.563106 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/364932b8-9721-41da-a1a3-7b9a1977da84-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-6w5w8\" (UID: \"364932b8-9721-41da-a1a3-7b9a1977da84\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-6w5w8" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.563219 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/2c6f790c-415f-46c9-baba-251cdc1e14b2-csi-data-dir\") pod \"csi-hostpathplugin-znwgl\" (UID: \"2c6f790c-415f-46c9-baba-251cdc1e14b2\") " pod="hostpath-provisioner/csi-hostpathplugin-znwgl" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.563452 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/834344ed-4a8b-4a1e-be84-33622d21d0af-stats-auth\") pod \"router-default-5444994796-6rw5f\" (UID: \"834344ed-4a8b-4a1e-be84-33622d21d0af\") " pod="openshift-ingress/router-default-5444994796-6rw5f" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.563762 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/b282f088-14fa-47c0-a8a8-1285a8a7c296-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-88xml\" (UID: \"b282f088-14fa-47c0-a8a8-1285a8a7c296\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-88xml" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.564990 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/c53fef56-513a-439c-a05a-cda26f9e3855-trusted-ca\") pod \"ingress-operator-5b745b69d9-wqhm8\" (UID: \"c53fef56-513a-439c-a05a-cda26f9e3855\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-wqhm8" Sep 30 12:22:48 crc kubenswrapper[5002]: E0930 12:22:48.565045 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 12:22:49.06503401 +0000 UTC m=+143.314716156 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zfcl9" (UID: "048223bb-4ff9-41e0-9b78-457a720ff399") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.565313 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/cae8a060-bc60-476a-9bee-11637282a23f-config-volume\") pod \"dns-default-h52ns\" (UID: \"cae8a060-bc60-476a-9bee-11637282a23f\") " pod="openshift-dns/dns-default-h52ns" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.569530 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/1fa6d0bc-ffc5-4974-bb50-123dd96187bd-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-djckq\" (UID: \"1fa6d0bc-ffc5-4974-bb50-123dd96187bd\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-djckq" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.575005 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fef68bfa-20e4-4a83-8642-be28f0f0b31a-auth-proxy-config\") pod \"machine-config-operator-74547568cd-2lfhz\" (UID: \"fef68bfa-20e4-4a83-8642-be28f0f0b31a\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-2lfhz" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.575968 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/846fc575-9b63-46b3-bd9d-dfc8ab0bf7a2-serving-cert\") pod \"service-ca-operator-777779d784-wtzqd\" (UID: \"846fc575-9b63-46b3-bd9d-dfc8ab0bf7a2\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-wtzqd" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.576224 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/5f1cdbf0-65a0-4621-a4de-2337b8151bd3-metrics-tls\") pod \"dns-operator-744455d44c-dgbs4\" (UID: \"5f1cdbf0-65a0-4621-a4de-2337b8151bd3\") " pod="openshift-dns-operator/dns-operator-744455d44c-dgbs4" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.579450 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/454fbe1a-1b3c-46e8-855c-a2dd62ea2b9e-node-bootstrap-token\") pod \"machine-config-server-rps48\" (UID: \"454fbe1a-1b3c-46e8-855c-a2dd62ea2b9e\") " pod="openshift-machine-config-operator/machine-config-server-rps48" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.581461 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/846fc575-9b63-46b3-bd9d-dfc8ab0bf7a2-config\") pod \"service-ca-operator-777779d784-wtzqd\" (UID: \"846fc575-9b63-46b3-bd9d-dfc8ab0bf7a2\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-wtzqd" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.571819 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"control-plane-machine-set-operator-tls\" (UniqueName: 
\"kubernetes.io/secret/f27fcf7d-17db-407a-b6ee-e34779332edf-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-2pmxc\" (UID: \"f27fcf7d-17db-407a-b6ee-e34779332edf\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-2pmxc" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.583172 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/f1a5276b-5811-4a19-8d31-a46a1c787d8a-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-2jxnc\" (UID: \"f1a5276b-5811-4a19-8d31-a46a1c787d8a\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-2jxnc" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.583451 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/dfee3432-722f-454a-bd40-36fcfe1fb935-apiservice-cert\") pod \"packageserver-d55dfcdfc-sczzw\" (UID: \"dfee3432-722f-454a-bd40-36fcfe1fb935\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-sczzw" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.585051 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/b91e38cc-c852-407d-8efd-227f0bfaa5fb-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-n5jzk\" (UID: \"b91e38cc-c852-407d-8efd-227f0bfaa5fb\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-n5jzk" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.585594 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-h7jgw" event={"ID":"8978cad0-f5e7-4cd8-a862-225087df083e","Type":"ContainerStarted","Data":"b7216b1a0af3fc82c89cb89d96d3120ecf96d613f8d4bff055550a08939b78cf"} Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.588302 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/cae8a060-bc60-476a-9bee-11637282a23f-metrics-tls\") pod \"dns-default-h52ns\" (UID: \"cae8a060-bc60-476a-9bee-11637282a23f\") " pod="openshift-dns/dns-default-h52ns" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.588455 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bae0075d-69e4-48a2-91f2-a6e2df1529f0-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-v6kxk\" (UID: \"bae0075d-69e4-48a2-91f2-a6e2df1529f0\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-v6kxk" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.588614 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/fea8ba5e-73ec-4bce-99f6-72b720813d96-cert\") pod \"ingress-canary-2fqm7\" (UID: \"fea8ba5e-73ec-4bce-99f6-72b720813d96\") " pod="openshift-ingress-canary/ingress-canary-2fqm7" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.588715 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/796cc797-549b-40ee-8bf9-66a8d9b54703-profile-collector-cert\") pod \"catalog-operator-68c6474976-6tqs2\" (UID: \"796cc797-549b-40ee-8bf9-66a8d9b54703\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-6tqs2" Sep 30 12:22:48 crc 
kubenswrapper[5002]: I0930 12:22:48.590392 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-z66g4" event={"ID":"f4580810-3604-47b5-8a68-d337b0890b48","Type":"ContainerStarted","Data":"4e825a111204eff9a159c5e6ef2444fcf1f905d4ad3bc51f735abbcc33e16453"} Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.590415 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-z66g4" event={"ID":"f4580810-3604-47b5-8a68-d337b0890b48","Type":"ContainerStarted","Data":"4825cb02c5cdafcb27640a45448f58ec8e0e5c4a4e1e764de7f0ddc7449b01c5"} Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.590946 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/834344ed-4a8b-4a1e-be84-33622d21d0af-metrics-certs\") pod \"router-default-5444994796-6rw5f\" (UID: \"834344ed-4a8b-4a1e-be84-33622d21d0af\") " pod="openshift-ingress/router-default-5444994796-6rw5f" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.592426 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/796cc797-549b-40ee-8bf9-66a8d9b54703-srv-cert\") pod \"catalog-operator-68c6474976-6tqs2\" (UID: \"796cc797-549b-40ee-8bf9-66a8d9b54703\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-6tqs2" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.592682 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k7rp6\" (UniqueName: \"kubernetes.io/projected/54790eef-93bc-4d88-bcc5-3a2f6e1c1fa5-kube-api-access-k7rp6\") pod \"downloads-7954f5f757-tdxkp\" (UID: \"54790eef-93bc-4d88-bcc5-3a2f6e1c1fa5\") " pod="openshift-console/downloads-7954f5f757-tdxkp" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.592719 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/0eff3533-ad12-4c4b-bd1e-6a92d09d2835-signing-key\") pod \"service-ca-9c57cc56f-4nrzw\" (UID: \"0eff3533-ad12-4c4b-bd1e-6a92d09d2835\") " pod="openshift-service-ca/service-ca-9c57cc56f-4nrzw" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.594780 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/dfee3432-722f-454a-bd40-36fcfe1fb935-webhook-cert\") pod \"packageserver-d55dfcdfc-sczzw\" (UID: \"dfee3432-722f-454a-bd40-36fcfe1fb935\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-sczzw" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.595326 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-879f6c89f-p582f" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.596150 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/0335170b-46ee-4cd3-aae5-694623192d49-secret-volume\") pod \"collect-profiles-29320575-h29rv\" (UID: \"0335170b-46ee-4cd3-aae5-694623192d49\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29320575-h29rv" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.596698 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vgd9l\" (UniqueName: \"kubernetes.io/projected/048223bb-4ff9-41e0-9b78-457a720ff399-kube-api-access-vgd9l\") pod \"image-registry-697d97f7c8-zfcl9\" (UID: 
\"048223bb-4ff9-41e0-9b78-457a720ff399\") " pod="openshift-image-registry/image-registry-697d97f7c8-zfcl9" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.602407 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fef68bfa-20e4-4a83-8642-be28f0f0b31a-proxy-tls\") pod \"machine-config-operator-74547568cd-2lfhz\" (UID: \"fef68bfa-20e4-4a83-8642-be28f0f0b31a\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-2lfhz" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.610956 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-trnhx"] Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.611724 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qc7nq\" (UniqueName: \"kubernetes.io/projected/7151e9d9-4417-40bb-aac5-8f838065aa79-kube-api-access-qc7nq\") pod \"console-f9d7485db-79sft\" (UID: \"7151e9d9-4417-40bb-aac5-8f838065aa79\") " pod="openshift-console/console-f9d7485db-79sft" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.614746 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-tdxkp" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.620641 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-ngjm2" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.640679 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p26xr\" (UniqueName: \"kubernetes.io/projected/f7cb4999-7580-47a2-a3e6-764d49684eaf-kube-api-access-p26xr\") pod \"etcd-operator-b45778765-lv4mz\" (UID: \"f7cb4999-7580-47a2-a3e6-764d49684eaf\") " pod="openshift-etcd-operator/etcd-operator-b45778765-lv4mz" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.644302 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-wk84c" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.644718 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 12:22:48 crc kubenswrapper[5002]: E0930 12:22:48.646234 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 12:22:49.146207037 +0000 UTC m=+143.395889263 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.647269 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-f9d7485db-79sft" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.651198 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5d8qh\" (UniqueName: \"kubernetes.io/projected/14158f4f-292f-4693-9ed2-c7fa97452ecb-kube-api-access-5d8qh\") pod \"openshift-config-operator-7777fb866f-hns42\" (UID: \"14158f4f-292f-4693-9ed2-c7fa97452ecb\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-hns42" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.673237 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-8m9sz"] Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.693373 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-lv4mz" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.702574 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bwmsq\" (UniqueName: \"kubernetes.io/projected/e536d056-a018-4d58-a85a-24d70c2116fc-kube-api-access-bwmsq\") pod \"authentication-operator-69f744f599-xnv2t\" (UID: \"e536d056-a018-4d58-a85a-24d70c2116fc\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-xnv2t" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.723477 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8t6nq\" (UniqueName: \"kubernetes.io/projected/cae8a060-bc60-476a-9bee-11637282a23f-kube-api-access-8t6nq\") pod \"dns-default-h52ns\" (UID: \"cae8a060-bc60-476a-9bee-11637282a23f\") " pod="openshift-dns/dns-default-h52ns" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.727862 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-8kz2p"] Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.734391 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jvwsk\" (UniqueName: \"kubernetes.io/projected/dfee3432-722f-454a-bd40-36fcfe1fb935-kube-api-access-jvwsk\") pod \"packageserver-d55dfcdfc-sczzw\" (UID: \"dfee3432-722f-454a-bd40-36fcfe1fb935\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-sczzw" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.747424 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zfcl9\" (UID: \"048223bb-4ff9-41e0-9b78-457a720ff399\") " pod="openshift-image-registry/image-registry-697d97f7c8-zfcl9" Sep 30 12:22:48 crc kubenswrapper[5002]: E0930 12:22:48.747873 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 12:22:49.247857048 +0000 UTC m=+143.497539194 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zfcl9" (UID: "048223bb-4ff9-41e0-9b78-457a720ff399") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.750270 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-fp74c" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.763942 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tw4gl\" (UniqueName: \"kubernetes.io/projected/454fbe1a-1b3c-46e8-855c-a2dd62ea2b9e-kube-api-access-tw4gl\") pod \"machine-config-server-rps48\" (UID: \"454fbe1a-1b3c-46e8-855c-a2dd62ea2b9e\") " pod="openshift-machine-config-operator/machine-config-server-rps48" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.773576 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-pzv7v"] Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.799268 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-57t2m" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.814230 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/5b1374f1-9722-4483-b976-775bdd8cb65a-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-k6j67\" (UID: \"5b1374f1-9722-4483-b976-775bdd8cb65a\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-k6j67" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.825997 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/bae0075d-69e4-48a2-91f2-a6e2df1529f0-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-v6kxk\" (UID: \"bae0075d-69e4-48a2-91f2-a6e2df1529f0\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-v6kxk" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.833018 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hns4t\" (UniqueName: \"kubernetes.io/projected/c53fef56-513a-439c-a05a-cda26f9e3855-kube-api-access-hns4t\") pod \"ingress-operator-5b745b69d9-wqhm8\" (UID: \"c53fef56-513a-439c-a05a-cda26f9e3855\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-wqhm8" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.845954 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/364932b8-9721-41da-a1a3-7b9a1977da84-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-6w5w8\" (UID: \"364932b8-9721-41da-a1a3-7b9a1977da84\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-6w5w8" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.848090 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod 
\"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 12:22:48 crc kubenswrapper[5002]: E0930 12:22:48.848622 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 12:22:49.348605865 +0000 UTC m=+143.598288011 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.855089 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lc5bz\" (UniqueName: \"kubernetes.io/projected/f1a5276b-5811-4a19-8d31-a46a1c787d8a-kube-api-access-lc5bz\") pod \"package-server-manager-789f6589d5-2jxnc\" (UID: \"f1a5276b-5811-4a19-8d31-a46a1c787d8a\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-2jxnc" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.855572 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-hns42" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.862726 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-sczzw" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.868096 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/dns-default-h52ns" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.873726 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-xnv2t" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.878188 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n459d\" (UniqueName: \"kubernetes.io/projected/796cc797-549b-40ee-8bf9-66a8d9b54703-kube-api-access-n459d\") pod \"catalog-operator-68c6474976-6tqs2\" (UID: \"796cc797-549b-40ee-8bf9-66a8d9b54703\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-6tqs2" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.905227 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ml679\" (UniqueName: \"kubernetes.io/projected/1fa6d0bc-ffc5-4974-bb50-123dd96187bd-kube-api-access-ml679\") pod \"machine-config-controller-84d6567774-djckq\" (UID: \"1fa6d0bc-ffc5-4974-bb50-123dd96187bd\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-djckq" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.905981 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-rps48" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.946643 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-82tzc\" (UniqueName: \"kubernetes.io/projected/b91e38cc-c852-407d-8efd-227f0bfaa5fb-kube-api-access-82tzc\") pod \"cluster-samples-operator-665b6dd947-n5jzk\" (UID: \"b91e38cc-c852-407d-8efd-227f0bfaa5fb\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-n5jzk" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.951103 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zfcl9\" (UID: \"048223bb-4ff9-41e0-9b78-457a720ff399\") " pod="openshift-image-registry/image-registry-697d97f7c8-zfcl9" Sep 30 12:22:48 crc kubenswrapper[5002]: E0930 12:22:48.951609 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 12:22:49.451596663 +0000 UTC m=+143.701278809 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zfcl9" (UID: "048223bb-4ff9-41e0-9b78-457a720ff399") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.951945 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9t94x\" (UniqueName: \"kubernetes.io/projected/5f1cdbf0-65a0-4621-a4de-2337b8151bd3-kube-api-access-9t94x\") pod \"dns-operator-744455d44c-dgbs4\" (UID: \"5f1cdbf0-65a0-4621-a4de-2337b8151bd3\") " pod="openshift-dns-operator/dns-operator-744455d44c-dgbs4" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.979445 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p89l8\" (UniqueName: \"kubernetes.io/projected/834344ed-4a8b-4a1e-be84-33622d21d0af-kube-api-access-p89l8\") pod \"router-default-5444994796-6rw5f\" (UID: \"834344ed-4a8b-4a1e-be84-33622d21d0af\") " pod="openshift-ingress/router-default-5444994796-6rw5f" Sep 30 12:22:48 crc kubenswrapper[5002]: I0930 12:22:48.985615 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-79qt4\" (UniqueName: \"kubernetes.io/projected/2c6f790c-415f-46c9-baba-251cdc1e14b2-kube-api-access-79qt4\") pod \"csi-hostpathplugin-znwgl\" (UID: \"2c6f790c-415f-46c9-baba-251cdc1e14b2\") " pod="hostpath-provisioner/csi-hostpathplugin-znwgl" Sep 30 12:22:49 crc kubenswrapper[5002]: I0930 12:22:49.001520 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-v6kxk" Sep 30 12:22:49 crc kubenswrapper[5002]: I0930 12:22:49.006893 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-6rw5f" Sep 30 12:22:49 crc kubenswrapper[5002]: I0930 12:22:49.011445 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-6w5w8" Sep 30 12:22:49 crc kubenswrapper[5002]: I0930 12:22:49.018616 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vk627\" (UniqueName: \"kubernetes.io/projected/0eff3533-ad12-4c4b-bd1e-6a92d09d2835-kube-api-access-vk627\") pod \"service-ca-9c57cc56f-4nrzw\" (UID: \"0eff3533-ad12-4c4b-bd1e-6a92d09d2835\") " pod="openshift-service-ca/service-ca-9c57cc56f-4nrzw" Sep 30 12:22:49 crc kubenswrapper[5002]: I0930 12:22:49.020427 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-n5jzk" Sep 30 12:22:49 crc kubenswrapper[5002]: I0930 12:22:49.024209 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j6ld9\" (UniqueName: \"kubernetes.io/projected/07d72c87-0749-4f19-b53f-7733b07cc149-kube-api-access-j6ld9\") pod \"olm-operator-6b444d44fb-9kzms\" (UID: \"07d72c87-0749-4f19-b53f-7733b07cc149\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-9kzms" Sep 30 12:22:49 crc kubenswrapper[5002]: I0930 12:22:49.042538 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tjxqj\" (UniqueName: \"kubernetes.io/projected/fef68bfa-20e4-4a83-8642-be28f0f0b31a-kube-api-access-tjxqj\") pod \"machine-config-operator-74547568cd-2lfhz\" (UID: \"fef68bfa-20e4-4a83-8642-be28f0f0b31a\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-2lfhz" Sep 30 12:22:49 crc kubenswrapper[5002]: I0930 12:22:49.049327 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-2jxnc" Sep 30 12:22:49 crc kubenswrapper[5002]: I0930 12:22:49.065794 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 12:22:49 crc kubenswrapper[5002]: E0930 12:22:49.065882 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 12:22:49.565858488 +0000 UTC m=+143.815540634 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 12:22:49 crc kubenswrapper[5002]: I0930 12:22:49.065946 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zfcl9\" (UID: \"048223bb-4ff9-41e0-9b78-457a720ff399\") " pod="openshift-image-registry/image-registry-697d97f7c8-zfcl9" Sep 30 12:22:49 crc kubenswrapper[5002]: E0930 12:22:49.066304 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 12:22:49.56629258 +0000 UTC m=+143.815974726 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zfcl9" (UID: "048223bb-4ff9-41e0-9b78-457a720ff399") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 12:22:49 crc kubenswrapper[5002]: I0930 12:22:49.066531 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fmbjf\" (UniqueName: \"kubernetes.io/projected/846fc575-9b63-46b3-bd9d-dfc8ab0bf7a2-kube-api-access-fmbjf\") pod \"service-ca-operator-777779d784-wtzqd\" (UID: \"846fc575-9b63-46b3-bd9d-dfc8ab0bf7a2\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-wtzqd" Sep 30 12:22:49 crc kubenswrapper[5002]: I0930 12:22:49.072458 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-k6j67" Sep 30 12:22:49 crc kubenswrapper[5002]: I0930 12:22:49.101038 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-djckq" Sep 30 12:22:49 crc kubenswrapper[5002]: I0930 12:22:49.107146 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jpjhv\" (UniqueName: \"kubernetes.io/projected/89d8a136-01a4-487f-8160-9a0cce584cec-kube-api-access-jpjhv\") pod \"migrator-59844c95c7-l9vch\" (UID: \"89d8a136-01a4-487f-8160-9a0cce584cec\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-l9vch" Sep 30 12:22:49 crc kubenswrapper[5002]: I0930 12:22:49.107578 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-6tqs2" Sep 30 12:22:49 crc kubenswrapper[5002]: I0930 12:22:49.114666 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-dgbs4" Sep 30 12:22:49 crc kubenswrapper[5002]: I0930 12:22:49.125493 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-9kzms" Sep 30 12:22:49 crc kubenswrapper[5002]: I0930 12:22:49.137948 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2rjz\" (UniqueName: \"kubernetes.io/projected/b282f088-14fa-47c0-a8a8-1285a8a7c296-kube-api-access-s2rjz\") pod \"multus-admission-controller-857f4d67dd-88xml\" (UID: \"b282f088-14fa-47c0-a8a8-1285a8a7c296\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-88xml" Sep 30 12:22:49 crc kubenswrapper[5002]: I0930 12:22:49.141814 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-wtzqd" Sep 30 12:22:49 crc kubenswrapper[5002]: I0930 12:22:49.148779 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-4nrzw" Sep 30 12:22:49 crc kubenswrapper[5002]: I0930 12:22:49.149601 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wcdgq\" (UniqueName: \"kubernetes.io/projected/acdc25a7-3353-430f-b856-22a1259025ee-kube-api-access-wcdgq\") pod \"marketplace-operator-79b997595-hc84k\" (UID: \"acdc25a7-3353-430f-b856-22a1259025ee\") " pod="openshift-marketplace/marketplace-operator-79b997595-hc84k" Sep 30 12:22:49 crc kubenswrapper[5002]: I0930 12:22:49.156628 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7rsrj\" (UniqueName: \"kubernetes.io/projected/f27fcf7d-17db-407a-b6ee-e34779332edf-kube-api-access-7rsrj\") pod \"control-plane-machine-set-operator-78cbb6b69f-2pmxc\" (UID: \"f27fcf7d-17db-407a-b6ee-e34779332edf\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-2pmxc" Sep 30 12:22:49 crc kubenswrapper[5002]: I0930 12:22:49.174575 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 12:22:49 crc kubenswrapper[5002]: E0930 12:22:49.174914 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 12:22:49.674900307 +0000 UTC m=+143.924582453 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 12:22:49 crc kubenswrapper[5002]: I0930 12:22:49.182678 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-ngjm2"] Sep 30 12:22:49 crc kubenswrapper[5002]: I0930 12:22:49.190532 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-znwgl" Sep 30 12:22:49 crc kubenswrapper[5002]: I0930 12:22:49.191306 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xt4r8\" (UniqueName: \"kubernetes.io/projected/e10da470-8dec-47bc-8d4b-0cbc5362aea0-kube-api-access-xt4r8\") pod \"kube-storage-version-migrator-operator-b67b599dd-dnmjr\" (UID: \"e10da470-8dec-47bc-8d4b-0cbc5362aea0\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-dnmjr" Sep 30 12:22:49 crc kubenswrapper[5002]: I0930 12:22:49.200161 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/c53fef56-513a-439c-a05a-cda26f9e3855-bound-sa-token\") pod \"ingress-operator-5b745b69d9-wqhm8\" (UID: \"c53fef56-513a-439c-a05a-cda26f9e3855\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-wqhm8" Sep 30 12:22:49 crc kubenswrapper[5002]: I0930 12:22:49.201888 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dwrvp\" (UniqueName: \"kubernetes.io/projected/0335170b-46ee-4cd3-aae5-694623192d49-kube-api-access-dwrvp\") pod \"collect-profiles-29320575-h29rv\" (UID: \"0335170b-46ee-4cd3-aae5-694623192d49\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29320575-h29rv" Sep 30 12:22:49 crc kubenswrapper[5002]: I0930 12:22:49.211231 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-tdxkp"] Sep 30 12:22:49 crc kubenswrapper[5002]: I0930 12:22:49.232753 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mvvkt\" (UniqueName: \"kubernetes.io/projected/fea8ba5e-73ec-4bce-99f6-72b720813d96-kube-api-access-mvvkt\") pod \"ingress-canary-2fqm7\" (UID: \"fea8ba5e-73ec-4bce-99f6-72b720813d96\") " pod="openshift-ingress-canary/ingress-canary-2fqm7" Sep 30 12:22:49 crc kubenswrapper[5002]: I0930 12:22:49.276245 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zfcl9\" (UID: \"048223bb-4ff9-41e0-9b78-457a720ff399\") " pod="openshift-image-registry/image-registry-697d97f7c8-zfcl9" Sep 30 12:22:49 crc kubenswrapper[5002]: E0930 12:22:49.276534 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 12:22:49.776522677 +0000 UTC m=+144.026204823 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zfcl9" (UID: "048223bb-4ff9-41e0-9b78-457a720ff399") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 12:22:49 crc kubenswrapper[5002]: I0930 12:22:49.335520 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-2lfhz" Sep 30 12:22:49 crc kubenswrapper[5002]: I0930 12:22:49.340880 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-wqhm8" Sep 30 12:22:49 crc kubenswrapper[5002]: I0930 12:22:49.354800 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-88xml" Sep 30 12:22:49 crc kubenswrapper[5002]: W0930 12:22:49.355941 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod325572f4_db9f_40a9_bd48_df93e3ec42ed.slice/crio-e50cf97c9468b2b2fe1cb7d955931cb1815b7d273eaa87ce7f344c596e125626 WatchSource:0}: Error finding container e50cf97c9468b2b2fe1cb7d955931cb1815b7d273eaa87ce7f344c596e125626: Status 404 returned error can't find the container with id e50cf97c9468b2b2fe1cb7d955931cb1815b7d273eaa87ce7f344c596e125626 Sep 30 12:22:49 crc kubenswrapper[5002]: I0930 12:22:49.363134 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-dnmjr" Sep 30 12:22:49 crc kubenswrapper[5002]: I0930 12:22:49.377341 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 12:22:49 crc kubenswrapper[5002]: E0930 12:22:49.377571 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 12:22:49.877548251 +0000 UTC m=+144.127230397 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 12:22:49 crc kubenswrapper[5002]: I0930 12:22:49.377601 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zfcl9\" (UID: \"048223bb-4ff9-41e0-9b78-457a720ff399\") " pod="openshift-image-registry/image-registry-697d97f7c8-zfcl9" Sep 30 12:22:49 crc kubenswrapper[5002]: E0930 12:22:49.377882 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 12:22:49.877874681 +0000 UTC m=+144.127556827 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zfcl9" (UID: "048223bb-4ff9-41e0-9b78-457a720ff399") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 12:22:49 crc kubenswrapper[5002]: I0930 12:22:49.384148 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-2pmxc" Sep 30 12:22:49 crc kubenswrapper[5002]: I0930 12:22:49.390691 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-l9vch" Sep 30 12:22:49 crc kubenswrapper[5002]: I0930 12:22:49.432647 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-hc84k" Sep 30 12:22:49 crc kubenswrapper[5002]: I0930 12:22:49.455407 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29320575-h29rv" Sep 30 12:22:49 crc kubenswrapper[5002]: W0930 12:22:49.476717 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod54790eef_93bc_4d88_bcc5_3a2f6e1c1fa5.slice/crio-eaeaa9f8883f909b98fffae0389712e597415cd4b24a32d259ec3acccb4c228e WatchSource:0}: Error finding container eaeaa9f8883f909b98fffae0389712e597415cd4b24a32d259ec3acccb4c228e: Status 404 returned error can't find the container with id eaeaa9f8883f909b98fffae0389712e597415cd4b24a32d259ec3acccb4c228e Sep 30 12:22:49 crc kubenswrapper[5002]: I0930 12:22:49.478432 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 12:22:49 crc kubenswrapper[5002]: E0930 12:22:49.478549 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 12:22:49.978534434 +0000 UTC m=+144.228216580 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 12:22:49 crc kubenswrapper[5002]: I0930 12:22:49.478701 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zfcl9\" (UID: \"048223bb-4ff9-41e0-9b78-457a720ff399\") " pod="openshift-image-registry/image-registry-697d97f7c8-zfcl9" Sep 30 12:22:49 crc kubenswrapper[5002]: E0930 12:22:49.479014 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 12:22:49.979004367 +0000 UTC m=+144.228686523 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zfcl9" (UID: "048223bb-4ff9-41e0-9b78-457a720ff399") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 12:22:49 crc kubenswrapper[5002]: I0930 12:22:49.496652 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-canary/ingress-canary-2fqm7" Sep 30 12:22:49 crc kubenswrapper[5002]: I0930 12:22:49.579353 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 12:22:49 crc kubenswrapper[5002]: E0930 12:22:49.579550 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 12:22:50.079525517 +0000 UTC m=+144.329207663 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 12:22:49 crc kubenswrapper[5002]: I0930 12:22:49.579611 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zfcl9\" (UID: \"048223bb-4ff9-41e0-9b78-457a720ff399\") " pod="openshift-image-registry/image-registry-697d97f7c8-zfcl9" Sep 30 12:22:49 crc kubenswrapper[5002]: E0930 12:22:49.579970 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 12:22:50.079962449 +0000 UTC m=+144.329644595 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zfcl9" (UID: "048223bb-4ff9-41e0-9b78-457a720ff399") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 12:22:49 crc kubenswrapper[5002]: I0930 12:22:49.600743 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-6rw5f" event={"ID":"834344ed-4a8b-4a1e-be84-33622d21d0af","Type":"ContainerStarted","Data":"b38dbb488d1f360623201eb8caab206d326c6e7c47fa7f7e8a30accd453462c2"} Sep 30 12:22:49 crc kubenswrapper[5002]: I0930 12:22:49.603410 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-pzv7v" event={"ID":"b9be5683-0681-4c81-a534-a5f997ccde65","Type":"ContainerStarted","Data":"f84fc636d0102a843d9c81a5c0bd10fdad343bc7123c13cdf4a5a44172f89bc7"} Sep 30 12:22:49 crc kubenswrapper[5002]: I0930 12:22:49.608317 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-tdxkp" event={"ID":"54790eef-93bc-4d88-bcc5-3a2f6e1c1fa5","Type":"ContainerStarted","Data":"eaeaa9f8883f909b98fffae0389712e597415cd4b24a32d259ec3acccb4c228e"} Sep 30 12:22:49 crc kubenswrapper[5002]: I0930 12:22:49.610429 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-8kz2p" event={"ID":"caaf9351-4cc8-41c4-8ebc-80e0243f23ad","Type":"ContainerStarted","Data":"361c3e6b538e35834ee87bc26f82fd010930e2fd1a243dcdd4f0c8452a3e8798"} Sep 30 12:22:49 crc kubenswrapper[5002]: I0930 12:22:49.610450 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-8kz2p" event={"ID":"caaf9351-4cc8-41c4-8ebc-80e0243f23ad","Type":"ContainerStarted","Data":"9f78cb7ea59db63e4d7306313db4d4cf004675a9f3c8d2c3f15d36b9b3df09d0"} Sep 30 12:22:49 crc kubenswrapper[5002]: I0930 12:22:49.637450 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-cluster-machine-approver/machine-approver-56656f9798-h7jgw" event={"ID":"8978cad0-f5e7-4cd8-a862-225087df083e","Type":"ContainerStarted","Data":"ffb70899762c29ed26a0fffc22fc8b2ac8b67c957ec1359b8068d20b48841214"} Sep 30 12:22:49 crc kubenswrapper[5002]: I0930 12:22:49.661130 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-8m9sz" event={"ID":"34704ef4-bb3a-4e40-ac9e-7543c634d17f","Type":"ContainerStarted","Data":"3d219547eb11e75a1298e49361ddb7df74965315e9962a14ec4e8e23dfb757b4"} Sep 30 12:22:49 crc kubenswrapper[5002]: I0930 12:22:49.661171 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-8m9sz" event={"ID":"34704ef4-bb3a-4e40-ac9e-7543c634d17f","Type":"ContainerStarted","Data":"f3c67eeaa995c15982f76043972b52970ce7d03bd6feebf631892bbe5d898459"} Sep 30 12:22:49 crc kubenswrapper[5002]: I0930 12:22:49.661712 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console-operator/console-operator-58897d9998-8m9sz" Sep 30 12:22:49 crc kubenswrapper[5002]: I0930 12:22:49.666263 5002 patch_prober.go:28] interesting pod/console-operator-58897d9998-8m9sz container/console-operator namespace/openshift-console-operator: Readiness probe status=failure output="Get \"https://10.217.0.13:8443/readyz\": dial tcp 10.217.0.13:8443: connect: connection refused" start-of-body= Sep 30 12:22:49 crc kubenswrapper[5002]: I0930 12:22:49.666325 5002 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console-operator/console-operator-58897d9998-8m9sz" podUID="34704ef4-bb3a-4e40-ac9e-7543c634d17f" containerName="console-operator" probeResult="failure" output="Get \"https://10.217.0.13:8443/readyz\": dial tcp 10.217.0.13:8443: connect: connection refused" Sep 30 12:22:49 crc kubenswrapper[5002]: I0930 12:22:49.669967 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-trnhx" event={"ID":"962ecf1e-f7d6-46ca-9846-0f2c84d71b54","Type":"ContainerStarted","Data":"38674fdadbff7d913be686d712e98f60875e2feb679f61913fdb587c56c35a26"} Sep 30 12:22:49 crc kubenswrapper[5002]: I0930 12:22:49.670021 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-trnhx" event={"ID":"962ecf1e-f7d6-46ca-9846-0f2c84d71b54","Type":"ContainerStarted","Data":"e35e7de9b8d50696aafedcecc358cadd31f5413e3b1637905a81782da668612f"} Sep 30 12:22:49 crc kubenswrapper[5002]: I0930 12:22:49.671689 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-ngjm2" event={"ID":"325572f4-db9f-40a9-bd48-df93e3ec42ed","Type":"ContainerStarted","Data":"e50cf97c9468b2b2fe1cb7d955931cb1815b7d273eaa87ce7f344c596e125626"} Sep 30 12:22:49 crc kubenswrapper[5002]: I0930 12:22:49.673944 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-rps48" event={"ID":"454fbe1a-1b3c-46e8-855c-a2dd62ea2b9e","Type":"ContainerStarted","Data":"72ff05af18797cb3212a04f6c022b368b426207dd1654c3631cfcdfbb3b3c4cc"} Sep 30 12:22:49 crc kubenswrapper[5002]: I0930 12:22:49.681010 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod 
\"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 12:22:49 crc kubenswrapper[5002]: E0930 12:22:49.685816 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 12:22:50.185795848 +0000 UTC m=+144.435477994 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 12:22:49 crc kubenswrapper[5002]: I0930 12:22:49.783780 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zfcl9\" (UID: \"048223bb-4ff9-41e0-9b78-457a720ff399\") " pod="openshift-image-registry/image-registry-697d97f7c8-zfcl9" Sep 30 12:22:49 crc kubenswrapper[5002]: E0930 12:22:49.787920 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 12:22:50.287905792 +0000 UTC m=+144.537587938 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zfcl9" (UID: "048223bb-4ff9-41e0-9b78-457a720ff399") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 12:22:49 crc kubenswrapper[5002]: I0930 12:22:49.894938 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 12:22:49 crc kubenswrapper[5002]: E0930 12:22:49.895439 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 12:22:50.395421588 +0000 UTC m=+144.645103724 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 12:22:50 crc kubenswrapper[5002]: I0930 12:22:49.996864 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-lv4mz"] Sep 30 12:22:50 crc kubenswrapper[5002]: I0930 12:22:49.996910 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-wk84c"] Sep 30 12:22:50 crc kubenswrapper[5002]: I0930 12:22:50.006188 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zfcl9\" (UID: \"048223bb-4ff9-41e0-9b78-457a720ff399\") " pod="openshift-image-registry/image-registry-697d97f7c8-zfcl9" Sep 30 12:22:50 crc kubenswrapper[5002]: E0930 12:22:50.006633 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 12:22:50.506616017 +0000 UTC m=+144.756298163 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zfcl9" (UID: "048223bb-4ff9-41e0-9b78-457a720ff399") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 12:22:50 crc kubenswrapper[5002]: I0930 12:22:50.006802 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-79sft"] Sep 30 12:22:50 crc kubenswrapper[5002]: I0930 12:22:50.020614 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-8kz2p" podStartSLOduration=120.02059855 podStartE2EDuration="2m0.02059855s" podCreationTimestamp="2025-09-30 12:20:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 12:22:50.019251692 +0000 UTC m=+144.268933848" watchObservedRunningTime="2025-09-30 12:22:50.02059855 +0000 UTC m=+144.270280696" Sep 30 12:22:50 crc kubenswrapper[5002]: I0930 12:22:50.046342 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver/apiserver-76f77b778f-z66g4" podStartSLOduration=121.046325591 podStartE2EDuration="2m1.046325591s" podCreationTimestamp="2025-09-30 12:20:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 12:22:50.045385684 +0000 UTC m=+144.295067830" watchObservedRunningTime="2025-09-30 12:22:50.046325591 +0000 UTC m=+144.296007727" Sep 30 12:22:50 crc kubenswrapper[5002]: I0930 12:22:50.061026 5002 kubelet.go:2428] "SyncLoop UPDATE" 
source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-sczzw"] Sep 30 12:22:50 crc kubenswrapper[5002]: I0930 12:22:50.107441 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 12:22:50 crc kubenswrapper[5002]: E0930 12:22:50.107698 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 12:22:50.607683372 +0000 UTC m=+144.857365518 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 12:22:50 crc kubenswrapper[5002]: I0930 12:22:50.121247 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-879f6c89f-p582f" podStartSLOduration=120.121230293 podStartE2EDuration="2m0.121230293s" podCreationTimestamp="2025-09-30 12:20:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 12:22:50.120489321 +0000 UTC m=+144.370171487" watchObservedRunningTime="2025-09-30 12:22:50.121230293 +0000 UTC m=+144.370912439" Sep 30 12:22:50 crc kubenswrapper[5002]: I0930 12:22:50.132047 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-57t2m"] Sep 30 12:22:50 crc kubenswrapper[5002]: W0930 12:22:50.163215 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf7cb4999_7580_47a2_a3e6_764d49684eaf.slice/crio-ad8084f15541a8e504ee0e6d68041af2c890d2aa2e7864bade77311c7c03a2f7 WatchSource:0}: Error finding container ad8084f15541a8e504ee0e6d68041af2c890d2aa2e7864bade77311c7c03a2f7: Status 404 returned error can't find the container with id ad8084f15541a8e504ee0e6d68041af2c890d2aa2e7864bade77311c7c03a2f7 Sep 30 12:22:50 crc kubenswrapper[5002]: I0930 12:22:50.208219 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zfcl9\" (UID: \"048223bb-4ff9-41e0-9b78-457a720ff399\") " pod="openshift-image-registry/image-registry-697d97f7c8-zfcl9" Sep 30 12:22:50 crc kubenswrapper[5002]: E0930 12:22:50.208593 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 12:22:50.708582442 +0000 UTC m=+144.958264588 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zfcl9" (UID: "048223bb-4ff9-41e0-9b78-457a720ff399") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 12:22:50 crc kubenswrapper[5002]: I0930 12:22:50.309104 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 12:22:50 crc kubenswrapper[5002]: E0930 12:22:50.309432 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 12:22:50.809417131 +0000 UTC m=+145.059099277 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 12:22:50 crc kubenswrapper[5002]: I0930 12:22:50.411101 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zfcl9\" (UID: \"048223bb-4ff9-41e0-9b78-457a720ff399\") " pod="openshift-image-registry/image-registry-697d97f7c8-zfcl9" Sep 30 12:22:50 crc kubenswrapper[5002]: E0930 12:22:50.411483 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 12:22:50.911471064 +0000 UTC m=+145.161153210 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zfcl9" (UID: "048223bb-4ff9-41e0-9b78-457a720ff399") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 12:22:50 crc kubenswrapper[5002]: I0930 12:22:50.512786 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 12:22:50 crc kubenswrapper[5002]: E0930 12:22:50.527313 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 12:22:51.027286813 +0000 UTC m=+145.276968959 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 12:22:50 crc kubenswrapper[5002]: I0930 12:22:50.554125 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-h52ns"] Sep 30 12:22:50 crc kubenswrapper[5002]: I0930 12:22:50.613888 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zfcl9\" (UID: \"048223bb-4ff9-41e0-9b78-457a720ff399\") " pod="openshift-image-registry/image-registry-697d97f7c8-zfcl9" Sep 30 12:22:50 crc kubenswrapper[5002]: E0930 12:22:50.614257 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 12:22:51.114246532 +0000 UTC m=+145.363928668 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zfcl9" (UID: "048223bb-4ff9-41e0-9b78-457a720ff399") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 12:22:50 crc kubenswrapper[5002]: W0930 12:22:50.714680 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podccd47538_6f91_4c6d_91b0_afccf0c83b20.slice/crio-eb3040e9eb8ecddc5118e1c29992329d0d6462fec27d09715cc03f712c19de2c WatchSource:0}: Error finding container eb3040e9eb8ecddc5118e1c29992329d0d6462fec27d09715cc03f712c19de2c: Status 404 returned error can't find the container with id eb3040e9eb8ecddc5118e1c29992329d0d6462fec27d09715cc03f712c19de2c Sep 30 12:22:50 crc kubenswrapper[5002]: I0930 12:22:50.715540 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 12:22:50 crc kubenswrapper[5002]: E0930 12:22:50.715919 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 12:22:51.215900393 +0000 UTC m=+145.465582539 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 12:22:50 crc kubenswrapper[5002]: I0930 12:22:50.716852 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-fp74c"] Sep 30 12:22:50 crc kubenswrapper[5002]: I0930 12:22:50.723660 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-hns42"] Sep 30 12:22:50 crc kubenswrapper[5002]: I0930 12:22:50.742222 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-57t2m" event={"ID":"1c0ed680-87c2-438b-aca7-b9fa1c19d414","Type":"ContainerStarted","Data":"794aca28c140bc8dad657b302415dcdf625450a569c0d573f5a490373e1cf4ca"} Sep 30 12:22:50 crc kubenswrapper[5002]: I0930 12:22:50.765997 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-n5jzk"] Sep 30 12:22:50 crc kubenswrapper[5002]: I0930 12:22:50.775078 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-znwgl"] Sep 30 12:22:50 crc kubenswrapper[5002]: W0930 12:22:50.775355 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod14158f4f_292f_4693_9ed2_c7fa97452ecb.slice/crio-51ed06c50716939a9215c9bc6d7a6ad419a7936bf26f5e1b1309d76d2e3c18e8 WatchSource:0}: Error finding container 51ed06c50716939a9215c9bc6d7a6ad419a7936bf26f5e1b1309d76d2e3c18e8: Status 404 returned error can't find the container with id 51ed06c50716939a9215c9bc6d7a6ad419a7936bf26f5e1b1309d76d2e3c18e8 Sep 30 12:22:50 crc kubenswrapper[5002]: I0930 12:22:50.779671 5002 generic.go:334] "Generic (PLEG): container finished" podID="b9be5683-0681-4c81-a534-a5f997ccde65" containerID="7dac059ddeba3c809224e51be1ba65622edf4e3dde021339df1c0ae04dada15b" exitCode=0 Sep 30 12:22:50 crc kubenswrapper[5002]: I0930 12:22:50.779891 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-pzv7v" event={"ID":"b9be5683-0681-4c81-a534-a5f997ccde65","Type":"ContainerDied","Data":"7dac059ddeba3c809224e51be1ba65622edf4e3dde021339df1c0ae04dada15b"} Sep 30 12:22:50 crc kubenswrapper[5002]: I0930 12:22:50.783638 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-xnv2t"] Sep 30 12:22:50 crc kubenswrapper[5002]: I0930 12:22:50.795373 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-79sft" event={"ID":"7151e9d9-4417-40bb-aac5-8f838065aa79","Type":"ContainerStarted","Data":"db9f35aac93001499f179a91ffed4dd41ca88574dcf65e4ed661bc61bc833a56"} Sep 30 12:22:50 crc kubenswrapper[5002]: I0930 12:22:50.795517 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-79sft" event={"ID":"7151e9d9-4417-40bb-aac5-8f838065aa79","Type":"ContainerStarted","Data":"a1c29cdfaebcba48a1a0b36cc0b0389612b116f5259c975ec5730e77fa8cfe19"} Sep 30 12:22:50 crc kubenswrapper[5002]: 
W0930 12:22:50.806704 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2c6f790c_415f_46c9_baba_251cdc1e14b2.slice/crio-986d70d6b7b5b739a1ac976add9951443a99dfe932afd5e7f1544a23d10c018c WatchSource:0}: Error finding container 986d70d6b7b5b739a1ac976add9951443a99dfe932afd5e7f1544a23d10c018c: Status 404 returned error can't find the container with id 986d70d6b7b5b739a1ac976add9951443a99dfe932afd5e7f1544a23d10c018c Sep 30 12:22:50 crc kubenswrapper[5002]: I0930 12:22:50.810225 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-rps48" event={"ID":"454fbe1a-1b3c-46e8-855c-a2dd62ea2b9e","Type":"ContainerStarted","Data":"175707eac60cc0f86487eb0d1fa14dba4819eb86d20c3e6ea38750cbc1e1ec78"} Sep 30 12:22:50 crc kubenswrapper[5002]: I0930 12:22:50.817097 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zfcl9\" (UID: \"048223bb-4ff9-41e0-9b78-457a720ff399\") " pod="openshift-image-registry/image-registry-697d97f7c8-zfcl9" Sep 30 12:22:50 crc kubenswrapper[5002]: E0930 12:22:50.817421 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 12:22:51.317410621 +0000 UTC m=+145.567092767 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zfcl9" (UID: "048223bb-4ff9-41e0-9b78-457a720ff399") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 12:22:50 crc kubenswrapper[5002]: I0930 12:22:50.831920 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-h52ns" event={"ID":"cae8a060-bc60-476a-9bee-11637282a23f","Type":"ContainerStarted","Data":"255c99db6a75f37fd5764432b735098b9c012e20fa491b97051f9ba4a5843166"} Sep 30 12:22:50 crc kubenswrapper[5002]: I0930 12:22:50.842600 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console-operator/console-operator-58897d9998-8m9sz" podStartSLOduration=120.842578587 podStartE2EDuration="2m0.842578587s" podCreationTimestamp="2025-09-30 12:20:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 12:22:50.840986352 +0000 UTC m=+145.090668498" watchObservedRunningTime="2025-09-30 12:22:50.842578587 +0000 UTC m=+145.092260723" Sep 30 12:22:50 crc kubenswrapper[5002]: I0930 12:22:50.842869 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-lv4mz" event={"ID":"f7cb4999-7580-47a2-a3e6-764d49684eaf","Type":"ContainerStarted","Data":"ad8084f15541a8e504ee0e6d68041af2c890d2aa2e7864bade77311c7c03a2f7"} Sep 30 12:22:50 crc kubenswrapper[5002]: I0930 12:22:50.863146 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-dgbs4"] Sep 30 12:22:50 crc 
kubenswrapper[5002]: I0930 12:22:50.866262 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-2pmxc"] Sep 30 12:22:50 crc kubenswrapper[5002]: I0930 12:22:50.869741 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-dnmjr"] Sep 30 12:22:50 crc kubenswrapper[5002]: I0930 12:22:50.874108 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-4nrzw"] Sep 30 12:22:50 crc kubenswrapper[5002]: I0930 12:22:50.879545 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-trnhx" podStartSLOduration=121.879523473 podStartE2EDuration="2m1.879523473s" podCreationTimestamp="2025-09-30 12:20:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 12:22:50.87763248 +0000 UTC m=+145.127314626" watchObservedRunningTime="2025-09-30 12:22:50.879523473 +0000 UTC m=+145.129205629" Sep 30 12:22:50 crc kubenswrapper[5002]: I0930 12:22:50.880701 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-h7jgw" event={"ID":"8978cad0-f5e7-4cd8-a862-225087df083e","Type":"ContainerStarted","Data":"02f33979ccffd18f62feb1074a09ce9e9a361bf8ddd6bff124f503b6ece74555"} Sep 30 12:22:50 crc kubenswrapper[5002]: I0930 12:22:50.902423 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-sczzw" event={"ID":"dfee3432-722f-454a-bd40-36fcfe1fb935","Type":"ContainerStarted","Data":"dd30e6f75d5cab68d797e3ea1ad2c19ed42c9c270ee561daeedec255074b3f82"} Sep 30 12:22:50 crc kubenswrapper[5002]: I0930 12:22:50.903212 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-sczzw" Sep 30 12:22:50 crc kubenswrapper[5002]: I0930 12:22:50.903230 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-v6kxk"] Sep 30 12:22:50 crc kubenswrapper[5002]: I0930 12:22:50.905506 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-djckq"] Sep 30 12:22:50 crc kubenswrapper[5002]: I0930 12:22:50.912113 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-wqhm8"] Sep 30 12:22:50 crc kubenswrapper[5002]: I0930 12:22:50.917043 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-2jxnc"] Sep 30 12:22:50 crc kubenswrapper[5002]: I0930 12:22:50.920657 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 12:22:50 crc kubenswrapper[5002]: E0930 12:22:50.922946 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. 
No retries permitted until 2025-09-30 12:22:51.42292753 +0000 UTC m=+145.672609676 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 12:22:50 crc kubenswrapper[5002]: I0930 12:22:50.929280 5002 patch_prober.go:28] interesting pod/packageserver-d55dfcdfc-sczzw container/packageserver namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.42:5443/healthz\": dial tcp 10.217.0.42:5443: connect: connection refused" start-of-body= Sep 30 12:22:50 crc kubenswrapper[5002]: I0930 12:22:50.929309 5002 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-sczzw" podUID="dfee3432-722f-454a-bd40-36fcfe1fb935" containerName="packageserver" probeResult="failure" output="Get \"https://10.217.0.42:5443/healthz\": dial tcp 10.217.0.42:5443: connect: connection refused" Sep 30 12:22:50 crc kubenswrapper[5002]: I0930 12:22:50.929360 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-2lfhz"] Sep 30 12:22:50 crc kubenswrapper[5002]: I0930 12:22:50.932770 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-wtzqd"] Sep 30 12:22:50 crc kubenswrapper[5002]: I0930 12:22:50.935892 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-6w5w8"] Sep 30 12:22:50 crc kubenswrapper[5002]: I0930 12:22:50.944714 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-2fqm7"] Sep 30 12:22:50 crc kubenswrapper[5002]: I0930 12:22:50.971702 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-6rw5f" event={"ID":"834344ed-4a8b-4a1e-be84-33622d21d0af","Type":"ContainerStarted","Data":"8471d0ded397c3366a1938ed454dca9a6ae577ed7b4b9a218b3f6c1134f24940"} Sep 30 12:22:50 crc kubenswrapper[5002]: I0930 12:22:50.978194 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-sczzw" podStartSLOduration=120.97817142 podStartE2EDuration="2m0.97817142s" podCreationTimestamp="2025-09-30 12:20:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 12:22:50.955045371 +0000 UTC m=+145.204727517" watchObservedRunningTime="2025-09-30 12:22:50.97817142 +0000 UTC m=+145.227853566" Sep 30 12:22:50 crc kubenswrapper[5002]: W0930 12:22:50.987948 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode10da470_8dec_47bc_8d4b_0cbc5362aea0.slice/crio-8f037a442b30fd3710089a3b6fb55a3c7527636a4d149df7f07ebd45abba0477 WatchSource:0}: Error finding container 8f037a442b30fd3710089a3b6fb55a3c7527636a4d149df7f07ebd45abba0477: Status 404 returned error can't find the container with id 
8f037a442b30fd3710089a3b6fb55a3c7527636a4d149df7f07ebd45abba0477 Sep 30 12:22:50 crc kubenswrapper[5002]: I0930 12:22:50.995237 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-f9d7485db-79sft" podStartSLOduration=120.995203128 podStartE2EDuration="2m0.995203128s" podCreationTimestamp="2025-09-30 12:20:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 12:22:50.99316082 +0000 UTC m=+145.242842966" watchObservedRunningTime="2025-09-30 12:22:50.995203128 +0000 UTC m=+145.244885284" Sep 30 12:22:51 crc kubenswrapper[5002]: W0930 12:22:51.002787 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5f1cdbf0_65a0_4621_a4de_2337b8151bd3.slice/crio-2e11f912bd70bff67e01c7038487d8e6953dfc3e579798c96d84a03889e47c3f WatchSource:0}: Error finding container 2e11f912bd70bff67e01c7038487d8e6953dfc3e579798c96d84a03889e47c3f: Status 404 returned error can't find the container with id 2e11f912bd70bff67e01c7038487d8e6953dfc3e579798c96d84a03889e47c3f Sep 30 12:22:51 crc kubenswrapper[5002]: I0930 12:22:51.003000 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-ngjm2" event={"ID":"325572f4-db9f-40a9-bd48-df93e3ec42ed","Type":"ContainerStarted","Data":"eab0cba7e465f90da87b0b73ee7b243c2031171b2dd5bc663c338e85963c638a"} Sep 30 12:22:51 crc kubenswrapper[5002]: I0930 12:22:51.008113 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-ingress/router-default-5444994796-6rw5f" Sep 30 12:22:51 crc kubenswrapper[5002]: I0930 12:22:51.009782 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-wk84c" event={"ID":"87454cc1-5175-495b-9551-a19474d51e4a","Type":"ContainerStarted","Data":"67210a5e5e2f75d82525924fe0045c80e621408d5844dbe0f814c4fd9ace1eaf"} Sep 30 12:22:51 crc kubenswrapper[5002]: I0930 12:22:51.010271 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-558db77b4-wk84c" Sep 30 12:22:51 crc kubenswrapper[5002]: I0930 12:22:51.010372 5002 patch_prober.go:28] interesting pod/router-default-5444994796-6rw5f container/router namespace/openshift-ingress: Startup probe status=failure output="Get \"http://localhost:1936/healthz/ready\": dial tcp [::1]:1936: connect: connection refused" start-of-body= Sep 30 12:22:51 crc kubenswrapper[5002]: I0930 12:22:51.010420 5002 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-6rw5f" podUID="834344ed-4a8b-4a1e-be84-33622d21d0af" containerName="router" probeResult="failure" output="Get \"http://localhost:1936/healthz/ready\": dial tcp [::1]:1936: connect: connection refused" Sep 30 12:22:51 crc kubenswrapper[5002]: I0930 12:22:51.016135 5002 patch_prober.go:28] interesting pod/oauth-openshift-558db77b4-wk84c container/oauth-openshift namespace/openshift-authentication: Readiness probe status=failure output="Get \"https://10.217.0.14:6443/healthz\": dial tcp 10.217.0.14:6443: connect: connection refused" start-of-body= Sep 30 12:22:51 crc kubenswrapper[5002]: I0930 12:22:51.016170 5002 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-authentication/oauth-openshift-558db77b4-wk84c" podUID="87454cc1-5175-495b-9551-a19474d51e4a" 
containerName="oauth-openshift" probeResult="failure" output="Get \"https://10.217.0.14:6443/healthz\": dial tcp 10.217.0.14:6443: connect: connection refused" Sep 30 12:22:51 crc kubenswrapper[5002]: W0930 12:22:51.016761 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0eff3533_ad12_4c4b_bd1e_6a92d09d2835.slice/crio-dc930363a5384390eddebfdfbca96fe5b86f193ed31e7fc2ada1f869a6e8b091 WatchSource:0}: Error finding container dc930363a5384390eddebfdfbca96fe5b86f193ed31e7fc2ada1f869a6e8b091: Status 404 returned error can't find the container with id dc930363a5384390eddebfdfbca96fe5b86f193ed31e7fc2ada1f869a6e8b091 Sep 30 12:22:51 crc kubenswrapper[5002]: I0930 12:22:51.017278 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-tdxkp" event={"ID":"54790eef-93bc-4d88-bcc5-3a2f6e1c1fa5","Type":"ContainerStarted","Data":"d8ae04531cc178bc7a5b7ce16dbc940e8c2880efddf4c33eb50eceac9d8752cf"} Sep 30 12:22:51 crc kubenswrapper[5002]: I0930 12:22:51.018320 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/downloads-7954f5f757-tdxkp" Sep 30 12:22:51 crc kubenswrapper[5002]: I0930 12:22:51.020411 5002 patch_prober.go:28] interesting pod/downloads-7954f5f757-tdxkp container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.11:8080/\": dial tcp 10.217.0.11:8080: connect: connection refused" start-of-body= Sep 30 12:22:51 crc kubenswrapper[5002]: I0930 12:22:51.020455 5002 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-tdxkp" podUID="54790eef-93bc-4d88-bcc5-3a2f6e1c1fa5" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.11:8080/\": dial tcp 10.217.0.11:8080: connect: connection refused" Sep 30 12:22:51 crc kubenswrapper[5002]: I0930 12:22:51.023233 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zfcl9\" (UID: \"048223bb-4ff9-41e0-9b78-457a720ff399\") " pod="openshift-image-registry/image-registry-697d97f7c8-zfcl9" Sep 30 12:22:51 crc kubenswrapper[5002]: E0930 12:22:51.025251 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 12:22:51.52523347 +0000 UTC m=+145.774915686 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zfcl9" (UID: "048223bb-4ff9-41e0-9b78-457a720ff399") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 12:22:51 crc kubenswrapper[5002]: W0930 12:22:51.032071 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf1a5276b_5811_4a19_8d31_a46a1c787d8a.slice/crio-ad74feddafbf62309812f91c93fa151401a975477562cbfecbe1ac355982ab6d WatchSource:0}: Error finding container ad74feddafbf62309812f91c93fa151401a975477562cbfecbe1ac355982ab6d: Status 404 returned error can't find the container with id ad74feddafbf62309812f91c93fa151401a975477562cbfecbe1ac355982ab6d Sep 30 12:22:51 crc kubenswrapper[5002]: I0930 12:22:51.042160 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-h7jgw" podStartSLOduration=122.042137414 podStartE2EDuration="2m2.042137414s" podCreationTimestamp="2025-09-30 12:20:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 12:22:51.030408735 +0000 UTC m=+145.280090891" watchObservedRunningTime="2025-09-30 12:22:51.042137414 +0000 UTC m=+145.291819560" Sep 30 12:22:51 crc kubenswrapper[5002]: I0930 12:22:51.045168 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-88xml"] Sep 30 12:22:51 crc kubenswrapper[5002]: I0930 12:22:51.063129 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-l9vch"] Sep 30 12:22:51 crc kubenswrapper[5002]: I0930 12:22:51.065420 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-6tqs2"] Sep 30 12:22:51 crc kubenswrapper[5002]: I0930 12:22:51.069021 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-server-rps48" podStartSLOduration=5.069004307 podStartE2EDuration="5.069004307s" podCreationTimestamp="2025-09-30 12:22:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 12:22:51.067142495 +0000 UTC m=+145.316824631" watchObservedRunningTime="2025-09-30 12:22:51.069004307 +0000 UTC m=+145.318686453" Sep 30 12:22:51 crc kubenswrapper[5002]: I0930 12:22:51.079641 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29320575-h29rv"] Sep 30 12:22:51 crc kubenswrapper[5002]: I0930 12:22:51.090550 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-9kzms"] Sep 30 12:22:51 crc kubenswrapper[5002]: I0930 12:22:51.108382 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-k6j67"] Sep 30 12:22:51 crc kubenswrapper[5002]: I0930 12:22:51.125577 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" 
(UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 12:22:51 crc kubenswrapper[5002]: E0930 12:22:51.126832 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 12:22:51.62680633 +0000 UTC m=+145.876488476 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 12:22:51 crc kubenswrapper[5002]: I0930 12:22:51.128192 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zfcl9\" (UID: \"048223bb-4ff9-41e0-9b78-457a720ff399\") " pod="openshift-image-registry/image-registry-697d97f7c8-zfcl9" Sep 30 12:22:51 crc kubenswrapper[5002]: E0930 12:22:51.131216 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 12:22:51.631198663 +0000 UTC m=+145.880880919 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zfcl9" (UID: "048223bb-4ff9-41e0-9b78-457a720ff399") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 12:22:51 crc kubenswrapper[5002]: I0930 12:22:51.139428 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-hc84k"] Sep 30 12:22:51 crc kubenswrapper[5002]: I0930 12:22:51.139902 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-558db77b4-wk84c" podStartSLOduration=122.139888766 podStartE2EDuration="2m2.139888766s" podCreationTimestamp="2025-09-30 12:20:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 12:22:51.136288556 +0000 UTC m=+145.385970702" watchObservedRunningTime="2025-09-30 12:22:51.139888766 +0000 UTC m=+145.389570912" Sep 30 12:22:51 crc kubenswrapper[5002]: W0930 12:22:51.142031 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod89d8a136_01a4_487f_8160_9a0cce584cec.slice/crio-19a3809b65f659af60a9850cef5ad38df76767df94a98d72faf24a39bcb9d031 WatchSource:0}: Error finding container 19a3809b65f659af60a9850cef5ad38df76767df94a98d72faf24a39bcb9d031: Status 404 returned error can't find the container with id 19a3809b65f659af60a9850cef5ad38df76767df94a98d72faf24a39bcb9d031 Sep 30 12:22:51 crc kubenswrapper[5002]: W0930 12:22:51.146611 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod07d72c87_0749_4f19_b53f_7733b07cc149.slice/crio-b484e0bd45ef14e5680295a73bc297c16e9a152f184e2a9658699e522dc5cfc4 WatchSource:0}: Error finding container b484e0bd45ef14e5680295a73bc297c16e9a152f184e2a9658699e522dc5cfc4: Status 404 returned error can't find the container with id b484e0bd45ef14e5680295a73bc297c16e9a152f184e2a9658699e522dc5cfc4 Sep 30 12:22:51 crc kubenswrapper[5002]: W0930 12:22:51.153685 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5b1374f1_9722_4483_b976_775bdd8cb65a.slice/crio-a9a30b7d4e1b46bdcaa8526129b17f401627ff5e5a9dba9f828ddd7faaaf27b6 WatchSource:0}: Error finding container a9a30b7d4e1b46bdcaa8526129b17f401627ff5e5a9dba9f828ddd7faaaf27b6: Status 404 returned error can't find the container with id a9a30b7d4e1b46bdcaa8526129b17f401627ff5e5a9dba9f828ddd7faaaf27b6 Sep 30 12:22:51 crc kubenswrapper[5002]: I0930 12:22:51.161359 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress/router-default-5444994796-6rw5f" podStartSLOduration=121.161340908 podStartE2EDuration="2m1.161340908s" podCreationTimestamp="2025-09-30 12:20:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 12:22:51.160599287 +0000 UTC m=+145.410281443" watchObservedRunningTime="2025-09-30 12:22:51.161340908 +0000 UTC m=+145.411023064" Sep 30 12:22:51 crc kubenswrapper[5002]: W0930 12:22:51.163637 5002 manager.go:1169] Failed to process watch event 
{EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podacdc25a7_3353_430f_b856_22a1259025ee.slice/crio-26bf182a8d4a68ad62bf56999449f1e36c710f8ca51a232887e7e0c93a2dc57a WatchSource:0}: Error finding container 26bf182a8d4a68ad62bf56999449f1e36c710f8ca51a232887e7e0c93a2dc57a: Status 404 returned error can't find the container with id 26bf182a8d4a68ad62bf56999449f1e36c710f8ca51a232887e7e0c93a2dc57a Sep 30 12:22:51 crc kubenswrapper[5002]: I0930 12:22:51.206021 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/downloads-7954f5f757-tdxkp" podStartSLOduration=121.206007661 podStartE2EDuration="2m1.206007661s" podCreationTimestamp="2025-09-30 12:20:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 12:22:51.202589625 +0000 UTC m=+145.452271781" watchObservedRunningTime="2025-09-30 12:22:51.206007661 +0000 UTC m=+145.455689807" Sep 30 12:22:51 crc kubenswrapper[5002]: I0930 12:22:51.233894 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-ngjm2" podStartSLOduration=121.233879352 podStartE2EDuration="2m1.233879352s" podCreationTimestamp="2025-09-30 12:20:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 12:22:51.233196854 +0000 UTC m=+145.482879020" watchObservedRunningTime="2025-09-30 12:22:51.233879352 +0000 UTC m=+145.483561498" Sep 30 12:22:51 crc kubenswrapper[5002]: I0930 12:22:51.236480 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 12:22:51 crc kubenswrapper[5002]: E0930 12:22:51.236711 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 12:22:51.736675811 +0000 UTC m=+145.986357957 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 12:22:51 crc kubenswrapper[5002]: I0930 12:22:51.236887 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zfcl9\" (UID: \"048223bb-4ff9-41e0-9b78-457a720ff399\") " pod="openshift-image-registry/image-registry-697d97f7c8-zfcl9" Sep 30 12:22:51 crc kubenswrapper[5002]: E0930 12:22:51.237279 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. 
No retries permitted until 2025-09-30 12:22:51.737265318 +0000 UTC m=+145.986947464 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zfcl9" (UID: "048223bb-4ff9-41e0-9b78-457a720ff399") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 12:22:51 crc kubenswrapper[5002]: I0930 12:22:51.338143 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 12:22:51 crc kubenswrapper[5002]: E0930 12:22:51.338352 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 12:22:51.838319763 +0000 UTC m=+146.088001919 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 12:22:51 crc kubenswrapper[5002]: I0930 12:22:51.338424 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zfcl9\" (UID: \"048223bb-4ff9-41e0-9b78-457a720ff399\") " pod="openshift-image-registry/image-registry-697d97f7c8-zfcl9" Sep 30 12:22:51 crc kubenswrapper[5002]: E0930 12:22:51.338749 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 12:22:51.838736544 +0000 UTC m=+146.088418690 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zfcl9" (UID: "048223bb-4ff9-41e0-9b78-457a720ff399") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 12:22:51 crc kubenswrapper[5002]: I0930 12:22:51.439845 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 12:22:51 crc kubenswrapper[5002]: E0930 12:22:51.440302 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 12:22:51.940285703 +0000 UTC m=+146.189967849 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 12:22:51 crc kubenswrapper[5002]: I0930 12:22:51.541173 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zfcl9\" (UID: \"048223bb-4ff9-41e0-9b78-457a720ff399\") " pod="openshift-image-registry/image-registry-697d97f7c8-zfcl9" Sep 30 12:22:51 crc kubenswrapper[5002]: E0930 12:22:51.541852 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 12:22:52.041839731 +0000 UTC m=+146.291521877 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zfcl9" (UID: "048223bb-4ff9-41e0-9b78-457a720ff399") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 12:22:51 crc kubenswrapper[5002]: I0930 12:22:51.648038 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 12:22:51 crc kubenswrapper[5002]: E0930 12:22:51.648343 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 12:22:52.148328808 +0000 UTC m=+146.398010954 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 12:22:51 crc kubenswrapper[5002]: I0930 12:22:51.749308 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zfcl9\" (UID: \"048223bb-4ff9-41e0-9b78-457a720ff399\") " pod="openshift-image-registry/image-registry-697d97f7c8-zfcl9" Sep 30 12:22:51 crc kubenswrapper[5002]: E0930 12:22:51.749766 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 12:22:52.249751434 +0000 UTC m=+146.499433580 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zfcl9" (UID: "048223bb-4ff9-41e0-9b78-457a720ff399") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 12:22:51 crc kubenswrapper[5002]: I0930 12:22:51.849821 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 12:22:51 crc kubenswrapper[5002]: E0930 12:22:51.850698 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 12:22:52.350683965 +0000 UTC m=+146.600366101 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 12:22:51 crc kubenswrapper[5002]: I0930 12:22:51.939826 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-apiserver/apiserver-76f77b778f-z66g4" Sep 30 12:22:51 crc kubenswrapper[5002]: I0930 12:22:51.940725 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-apiserver/apiserver-76f77b778f-z66g4" Sep 30 12:22:51 crc kubenswrapper[5002]: I0930 12:22:51.951569 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zfcl9\" (UID: \"048223bb-4ff9-41e0-9b78-457a720ff399\") " pod="openshift-image-registry/image-registry-697d97f7c8-zfcl9" Sep 30 12:22:51 crc kubenswrapper[5002]: E0930 12:22:51.951922 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 12:22:52.451910364 +0000 UTC m=+146.701592510 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zfcl9" (UID: "048223bb-4ff9-41e0-9b78-457a720ff399") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 12:22:51 crc kubenswrapper[5002]: I0930 12:22:51.958547 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-apiserver/apiserver-76f77b778f-z66g4" Sep 30 12:22:52 crc kubenswrapper[5002]: I0930 12:22:52.017202 5002 patch_prober.go:28] interesting pod/router-default-5444994796-6rw5f container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Sep 30 12:22:52 crc kubenswrapper[5002]: [-]has-synced failed: reason withheld Sep 30 12:22:52 crc kubenswrapper[5002]: [+]process-running ok Sep 30 12:22:52 crc kubenswrapper[5002]: healthz check failed Sep 30 12:22:52 crc kubenswrapper[5002]: I0930 12:22:52.017270 5002 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-6rw5f" podUID="834344ed-4a8b-4a1e-be84-33622d21d0af" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Sep 30 12:22:52 crc kubenswrapper[5002]: I0930 12:22:52.018965 5002 patch_prober.go:28] interesting pod/console-operator-58897d9998-8m9sz container/console-operator namespace/openshift-console-operator: Readiness probe status=failure output="Get \"https://10.217.0.13:8443/readyz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Sep 30 12:22:52 crc kubenswrapper[5002]: I0930 12:22:52.018993 5002 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console-operator/console-operator-58897d9998-8m9sz" podUID="34704ef4-bb3a-4e40-ac9e-7543c634d17f" containerName="console-operator" probeResult="failure" output="Get \"https://10.217.0.13:8443/readyz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Sep 30 12:22:52 crc kubenswrapper[5002]: I0930 12:22:52.038540 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-wtzqd" event={"ID":"846fc575-9b63-46b3-bd9d-dfc8ab0bf7a2","Type":"ContainerStarted","Data":"b2113e60a1b721659232d504721f214516d02986b12e7c7a9fd18c91f82cc712"} Sep 30 12:22:52 crc kubenswrapper[5002]: I0930 12:22:52.045423 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-dgbs4" event={"ID":"5f1cdbf0-65a0-4621-a4de-2337b8151bd3","Type":"ContainerStarted","Data":"2e11f912bd70bff67e01c7038487d8e6953dfc3e579798c96d84a03889e47c3f"} Sep 30 12:22:52 crc kubenswrapper[5002]: I0930 12:22:52.052303 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 12:22:52 crc kubenswrapper[5002]: E0930 12:22:52.052411 5002 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 12:22:52.552396593 +0000 UTC m=+146.802078739 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 12:22:52 crc kubenswrapper[5002]: I0930 12:22:52.052670 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zfcl9\" (UID: \"048223bb-4ff9-41e0-9b78-457a720ff399\") " pod="openshift-image-registry/image-registry-697d97f7c8-zfcl9" Sep 30 12:22:52 crc kubenswrapper[5002]: E0930 12:22:52.053061 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 12:22:52.55304015 +0000 UTC m=+146.802722376 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zfcl9" (UID: "048223bb-4ff9-41e0-9b78-457a720ff399") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 12:22:52 crc kubenswrapper[5002]: I0930 12:22:52.053627 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-h52ns" event={"ID":"cae8a060-bc60-476a-9bee-11637282a23f","Type":"ContainerStarted","Data":"09342a48abb5e39bf9ff8cc8b0f6bc833cf1e690724581cee6014599f032644b"} Sep 30 12:22:52 crc kubenswrapper[5002]: I0930 12:22:52.057914 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-xnv2t" event={"ID":"e536d056-a018-4d58-a85a-24d70c2116fc","Type":"ContainerStarted","Data":"1d807ce546dbe5f7deca0859cc23ad2c17c0520f3767c326d938b2a3679b8996"} Sep 30 12:22:52 crc kubenswrapper[5002]: I0930 12:22:52.057952 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-xnv2t" event={"ID":"e536d056-a018-4d58-a85a-24d70c2116fc","Type":"ContainerStarted","Data":"3c93fd17729829dd15a76edeb25a9a8d67dab6ec87fd6b5f842a79d3df524f4d"} Sep 30 12:22:52 crc kubenswrapper[5002]: I0930 12:22:52.081977 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-6tqs2" event={"ID":"796cc797-549b-40ee-8bf9-66a8d9b54703","Type":"ContainerStarted","Data":"2b22bfc52fdc7def0ab2ced80e113227fc297492e67d5a0440728bd1407f0ac8"} Sep 30 12:22:52 crc kubenswrapper[5002]: I0930 12:22:52.082018 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-6tqs2" 
event={"ID":"796cc797-549b-40ee-8bf9-66a8d9b54703","Type":"ContainerStarted","Data":"bd05a2f35580e9214351c45d7d5cc93ea81dca37e24c600dcec5bd0bc6a729e6"} Sep 30 12:22:52 crc kubenswrapper[5002]: I0930 12:22:52.086203 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-6tqs2" Sep 30 12:22:52 crc kubenswrapper[5002]: I0930 12:22:52.104951 5002 patch_prober.go:28] interesting pod/catalog-operator-68c6474976-6tqs2 container/catalog-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.31:8443/healthz\": dial tcp 10.217.0.31:8443: connect: connection refused" start-of-body= Sep 30 12:22:52 crc kubenswrapper[5002]: I0930 12:22:52.105394 5002 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-6tqs2" podUID="796cc797-549b-40ee-8bf9-66a8d9b54703" containerName="catalog-operator" probeResult="failure" output="Get \"https://10.217.0.31:8443/healthz\": dial tcp 10.217.0.31:8443: connect: connection refused" Sep 30 12:22:52 crc kubenswrapper[5002]: I0930 12:22:52.110054 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication-operator/authentication-operator-69f744f599-xnv2t" podStartSLOduration=123.098473955 podStartE2EDuration="2m3.098473955s" podCreationTimestamp="2025-09-30 12:20:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 12:22:52.093231458 +0000 UTC m=+146.342913604" watchObservedRunningTime="2025-09-30 12:22:52.098473955 +0000 UTC m=+146.348156101" Sep 30 12:22:52 crc kubenswrapper[5002]: I0930 12:22:52.127836 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-6tqs2" podStartSLOduration=122.127820689 podStartE2EDuration="2m2.127820689s" podCreationTimestamp="2025-09-30 12:20:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 12:22:52.118867317 +0000 UTC m=+146.368549473" watchObservedRunningTime="2025-09-30 12:22:52.127820689 +0000 UTC m=+146.377502835" Sep 30 12:22:52 crc kubenswrapper[5002]: I0930 12:22:52.128867 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-2lfhz" event={"ID":"fef68bfa-20e4-4a83-8642-be28f0f0b31a","Type":"ContainerStarted","Data":"aa1939da3c9d042bc24ee33fc38f3174d8c7fbe6ab79c17e64239491081d8673"} Sep 30 12:22:52 crc kubenswrapper[5002]: I0930 12:22:52.129203 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-2lfhz" event={"ID":"fef68bfa-20e4-4a83-8642-be28f0f0b31a","Type":"ContainerStarted","Data":"98f24f0e25913d5eff03b6eb14d63b43f64de039b93b23a443e985d1c6b9f7b3"} Sep 30 12:22:52 crc kubenswrapper[5002]: I0930 12:22:52.151189 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-pzv7v" event={"ID":"b9be5683-0681-4c81-a534-a5f997ccde65","Type":"ContainerStarted","Data":"7961a46f56f23dd97358d7f0f819c01fbe7f5a488c6cda76c802bd5ca34871f4"} Sep 30 12:22:52 crc kubenswrapper[5002]: I0930 12:22:52.155084 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 12:22:52 crc kubenswrapper[5002]: E0930 12:22:52.155326 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 12:22:52.655313629 +0000 UTC m=+146.904995775 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 12:22:52 crc kubenswrapper[5002]: I0930 12:22:52.155681 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-88xml" event={"ID":"b282f088-14fa-47c0-a8a8-1285a8a7c296","Type":"ContainerStarted","Data":"d5e7e7f0e22ac67fb346d7253b52138f709a0d547a77e75b9f8ac4897e692e8c"} Sep 30 12:22:52 crc kubenswrapper[5002]: I0930 12:22:52.155882 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zfcl9\" (UID: \"048223bb-4ff9-41e0-9b78-457a720ff399\") " pod="openshift-image-registry/image-registry-697d97f7c8-zfcl9" Sep 30 12:22:52 crc kubenswrapper[5002]: E0930 12:22:52.156180 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 12:22:52.656172294 +0000 UTC m=+146.905854440 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zfcl9" (UID: "048223bb-4ff9-41e0-9b78-457a720ff399") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 12:22:52 crc kubenswrapper[5002]: I0930 12:22:52.177228 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-pzv7v" podStartSLOduration=122.177213554 podStartE2EDuration="2m2.177213554s" podCreationTimestamp="2025-09-30 12:20:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 12:22:52.176211586 +0000 UTC m=+146.425893732" watchObservedRunningTime="2025-09-30 12:22:52.177213554 +0000 UTC m=+146.426895700" Sep 30 12:22:52 crc kubenswrapper[5002]: I0930 12:22:52.205948 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-4nrzw" event={"ID":"0eff3533-ad12-4c4b-bd1e-6a92d09d2835","Type":"ContainerStarted","Data":"bd0f1c9de0228487d20396ce2138d06ae5e2cc4c79cf150084906491185ff9d7"} Sep 30 12:22:52 crc kubenswrapper[5002]: I0930 12:22:52.205983 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-4nrzw" event={"ID":"0eff3533-ad12-4c4b-bd1e-6a92d09d2835","Type":"ContainerStarted","Data":"dc930363a5384390eddebfdfbca96fe5b86f193ed31e7fc2ada1f869a6e8b091"} Sep 30 12:22:52 crc kubenswrapper[5002]: I0930 12:22:52.234851 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-znwgl" event={"ID":"2c6f790c-415f-46c9-baba-251cdc1e14b2","Type":"ContainerStarted","Data":"986d70d6b7b5b739a1ac976add9951443a99dfe932afd5e7f1544a23d10c018c"} Sep 30 12:22:52 crc kubenswrapper[5002]: I0930 12:22:52.241477 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca/service-ca-9c57cc56f-4nrzw" podStartSLOduration=122.241451336 podStartE2EDuration="2m2.241451336s" podCreationTimestamp="2025-09-30 12:20:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 12:22:52.235110088 +0000 UTC m=+146.484792244" watchObservedRunningTime="2025-09-30 12:22:52.241451336 +0000 UTC m=+146.491133482" Sep 30 12:22:52 crc kubenswrapper[5002]: I0930 12:22:52.261923 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 12:22:52 crc kubenswrapper[5002]: E0930 12:22:52.262227 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 12:22:52.762198098 +0000 UTC m=+147.011880244 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 12:22:52 crc kubenswrapper[5002]: I0930 12:22:52.262634 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zfcl9\" (UID: \"048223bb-4ff9-41e0-9b78-457a720ff399\") " pod="openshift-image-registry/image-registry-697d97f7c8-zfcl9" Sep 30 12:22:52 crc kubenswrapper[5002]: E0930 12:22:52.263677 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 12:22:52.763665059 +0000 UTC m=+147.013347205 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zfcl9" (UID: "048223bb-4ff9-41e0-9b78-457a720ff399") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 12:22:52 crc kubenswrapper[5002]: I0930 12:22:52.284755 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-djckq" event={"ID":"1fa6d0bc-ffc5-4974-bb50-123dd96187bd","Type":"ContainerStarted","Data":"f1bd55127248393301df23578687a4dcc13770778f33c549ba498d4cf469a796"} Sep 30 12:22:52 crc kubenswrapper[5002]: I0930 12:22:52.284808 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-djckq" event={"ID":"1fa6d0bc-ffc5-4974-bb50-123dd96187bd","Type":"ContainerStarted","Data":"4a3b198e65f2447738e23431636b5a228db630017b318807149f7dff2ce8195f"} Sep 30 12:22:52 crc kubenswrapper[5002]: I0930 12:22:52.294053 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-k6j67" event={"ID":"5b1374f1-9722-4483-b976-775bdd8cb65a","Type":"ContainerStarted","Data":"a9a30b7d4e1b46bdcaa8526129b17f401627ff5e5a9dba9f828ddd7faaaf27b6"} Sep 30 12:22:52 crc kubenswrapper[5002]: I0930 12:22:52.301740 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-lv4mz" event={"ID":"f7cb4999-7580-47a2-a3e6-764d49684eaf","Type":"ContainerStarted","Data":"5f27209c071da126f128ead04639b58a25c7236d847ac692a435c12977297961"} Sep 30 12:22:52 crc kubenswrapper[5002]: I0930 12:22:52.309660 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-6w5w8" event={"ID":"364932b8-9721-41da-a1a3-7b9a1977da84","Type":"ContainerStarted","Data":"90f4cf09fa9769fdeaa813d4114732f27ef326e2a82034e6f0b3a415af7061a5"} Sep 30 12:22:52 crc kubenswrapper[5002]: I0930 12:22:52.331903 5002 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd-operator/etcd-operator-b45778765-lv4mz" podStartSLOduration=122.331889792 podStartE2EDuration="2m2.331889792s" podCreationTimestamp="2025-09-30 12:20:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 12:22:52.329716412 +0000 UTC m=+146.579398558" watchObservedRunningTime="2025-09-30 12:22:52.331889792 +0000 UTC m=+146.581571938" Sep 30 12:22:52 crc kubenswrapper[5002]: I0930 12:22:52.335478 5002 generic.go:334] "Generic (PLEG): container finished" podID="14158f4f-292f-4693-9ed2-c7fa97452ecb" containerID="60696d4c6e884bf58cd53bfa5aab18317737158b092529b915af58ab51a0d70d" exitCode=0 Sep 30 12:22:52 crc kubenswrapper[5002]: I0930 12:22:52.335631 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-hns42" event={"ID":"14158f4f-292f-4693-9ed2-c7fa97452ecb","Type":"ContainerDied","Data":"60696d4c6e884bf58cd53bfa5aab18317737158b092529b915af58ab51a0d70d"} Sep 30 12:22:52 crc kubenswrapper[5002]: I0930 12:22:52.335664 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-hns42" event={"ID":"14158f4f-292f-4693-9ed2-c7fa97452ecb","Type":"ContainerStarted","Data":"51ed06c50716939a9215c9bc6d7a6ad419a7936bf26f5e1b1309d76d2e3c18e8"} Sep 30 12:22:52 crc kubenswrapper[5002]: I0930 12:22:52.353440 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-2jxnc" event={"ID":"f1a5276b-5811-4a19-8d31-a46a1c787d8a","Type":"ContainerStarted","Data":"ad74feddafbf62309812f91c93fa151401a975477562cbfecbe1ac355982ab6d"} Sep 30 12:22:52 crc kubenswrapper[5002]: I0930 12:22:52.380216 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-6w5w8" podStartSLOduration=122.380199098 podStartE2EDuration="2m2.380199098s" podCreationTimestamp="2025-09-30 12:20:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 12:22:52.364102356 +0000 UTC m=+146.613784502" watchObservedRunningTime="2025-09-30 12:22:52.380199098 +0000 UTC m=+146.629881244" Sep 30 12:22:52 crc kubenswrapper[5002]: I0930 12:22:52.380684 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 12:22:52 crc kubenswrapper[5002]: E0930 12:22:52.380969 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 12:22:52.880946908 +0000 UTC m=+147.130629054 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 12:22:52 crc kubenswrapper[5002]: I0930 12:22:52.385912 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-2fqm7" event={"ID":"fea8ba5e-73ec-4bce-99f6-72b720813d96","Type":"ContainerStarted","Data":"4e1e8216b29f594dffce50b38a42310307f60c32af63924c8df4914cc8a63a05"} Sep 30 12:22:52 crc kubenswrapper[5002]: I0930 12:22:52.385950 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-2fqm7" event={"ID":"fea8ba5e-73ec-4bce-99f6-72b720813d96","Type":"ContainerStarted","Data":"d0452703f47063848b7859c92000e1e84b6a5e89dab9eb81ecfbd87c1d7bdfd8"} Sep 30 12:22:52 crc kubenswrapper[5002]: I0930 12:22:52.409925 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-sczzw" event={"ID":"dfee3432-722f-454a-bd40-36fcfe1fb935","Type":"ContainerStarted","Data":"58bf3b6e45512e9317c6f45b3a00f2241d3ac9ed35c5e82452d2539d2f77d46a"} Sep 30 12:22:52 crc kubenswrapper[5002]: I0930 12:22:52.417182 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-canary/ingress-canary-2fqm7" podStartSLOduration=6.417168224 podStartE2EDuration="6.417168224s" podCreationTimestamp="2025-09-30 12:22:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 12:22:52.416235929 +0000 UTC m=+146.665918075" watchObservedRunningTime="2025-09-30 12:22:52.417168224 +0000 UTC m=+146.666850370" Sep 30 12:22:52 crc kubenswrapper[5002]: I0930 12:22:52.424192 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29320575-h29rv" event={"ID":"0335170b-46ee-4cd3-aae5-694623192d49","Type":"ContainerStarted","Data":"e5f30a7df10e71d5ace83c67ac0a4f56ccf3103b223a855bc4d9b2cadaf9c552"} Sep 30 12:22:52 crc kubenswrapper[5002]: I0930 12:22:52.444473 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-l9vch" event={"ID":"89d8a136-01a4-487f-8160-9a0cce584cec","Type":"ContainerStarted","Data":"19a3809b65f659af60a9850cef5ad38df76767df94a98d72faf24a39bcb9d031"} Sep 30 12:22:52 crc kubenswrapper[5002]: I0930 12:22:52.458061 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29320575-h29rv" podStartSLOduration=123.458040972 podStartE2EDuration="2m3.458040972s" podCreationTimestamp="2025-09-30 12:20:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 12:22:52.455310675 +0000 UTC m=+146.704992841" watchObservedRunningTime="2025-09-30 12:22:52.458040972 +0000 UTC m=+146.707723118" Sep 30 12:22:52 crc kubenswrapper[5002]: I0930 12:22:52.471126 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-2pmxc" 
event={"ID":"f27fcf7d-17db-407a-b6ee-e34779332edf","Type":"ContainerStarted","Data":"f1c8bdfbdbb119758644281784fc4bbfe6e107be4e1d7c93cee6aa6c40edd7c8"} Sep 30 12:22:52 crc kubenswrapper[5002]: I0930 12:22:52.471168 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-2pmxc" event={"ID":"f27fcf7d-17db-407a-b6ee-e34779332edf","Type":"ContainerStarted","Data":"23b8b4b81e03c48ba0739909419c8c15b977c3869defe58697ba85234960d43e"} Sep 30 12:22:52 crc kubenswrapper[5002]: I0930 12:22:52.482258 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zfcl9\" (UID: \"048223bb-4ff9-41e0-9b78-457a720ff399\") " pod="openshift-image-registry/image-registry-697d97f7c8-zfcl9" Sep 30 12:22:52 crc kubenswrapper[5002]: E0930 12:22:52.483552 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 12:22:52.983539167 +0000 UTC m=+147.233221313 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zfcl9" (UID: "048223bb-4ff9-41e0-9b78-457a720ff399") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 12:22:52 crc kubenswrapper[5002]: I0930 12:22:52.519213 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-wk84c" event={"ID":"87454cc1-5175-495b-9551-a19474d51e4a","Type":"ContainerStarted","Data":"f0ef6708f1f414ffd0dfc09c2763631d0ef154c8d6a5db080fed97bc7ee6ea04"} Sep 30 12:22:52 crc kubenswrapper[5002]: I0930 12:22:52.527561 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-v6kxk" event={"ID":"bae0075d-69e4-48a2-91f2-a6e2df1529f0","Type":"ContainerStarted","Data":"1fea5a1546c9a9e2a2386e9cb50f24f4f1ae07d7c02d116dd53f1bbd4b42cf08"} Sep 30 12:22:52 crc kubenswrapper[5002]: I0930 12:22:52.542403 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-hc84k" event={"ID":"acdc25a7-3353-430f-b856-22a1259025ee","Type":"ContainerStarted","Data":"26bf182a8d4a68ad62bf56999449f1e36c710f8ca51a232887e7e0c93a2dc57a"} Sep 30 12:22:52 crc kubenswrapper[5002]: I0930 12:22:52.542904 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-hc84k" Sep 30 12:22:52 crc kubenswrapper[5002]: I0930 12:22:52.549679 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-57t2m" event={"ID":"1c0ed680-87c2-438b-aca7-b9fa1c19d414","Type":"ContainerStarted","Data":"0112a1ce6c04952e2d2116012abdb3d17afad7d81dc44c97efa2c65ac68abbaa"} Sep 30 12:22:52 crc kubenswrapper[5002]: I0930 12:22:52.553085 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-57t2m" 
Sep 30 12:22:52 crc kubenswrapper[5002]: I0930 12:22:52.569284 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-2pmxc" podStartSLOduration=122.569265051 podStartE2EDuration="2m2.569265051s" podCreationTimestamp="2025-09-30 12:20:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 12:22:52.514216357 +0000 UTC m=+146.763898503" watchObservedRunningTime="2025-09-30 12:22:52.569265051 +0000 UTC m=+146.818947187" Sep 30 12:22:52 crc kubenswrapper[5002]: I0930 12:22:52.569757 5002 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-hc84k container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.34:8080/healthz\": dial tcp 10.217.0.34:8080: connect: connection refused" start-of-body= Sep 30 12:22:52 crc kubenswrapper[5002]: I0930 12:22:52.569788 5002 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-hc84k" podUID="acdc25a7-3353-430f-b856-22a1259025ee" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.34:8080/healthz\": dial tcp 10.217.0.34:8080: connect: connection refused" Sep 30 12:22:52 crc kubenswrapper[5002]: I0930 12:22:52.570995 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-v6kxk" podStartSLOduration=122.570990509 podStartE2EDuration="2m2.570990509s" podCreationTimestamp="2025-09-30 12:20:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 12:22:52.570102265 +0000 UTC m=+146.819784411" watchObservedRunningTime="2025-09-30 12:22:52.570990509 +0000 UTC m=+146.820672655" Sep 30 12:22:52 crc kubenswrapper[5002]: I0930 12:22:52.585189 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-dnmjr" event={"ID":"e10da470-8dec-47bc-8d4b-0cbc5362aea0","Type":"ContainerStarted","Data":"203c6dfe0493c18347741be2bafcb9774d67197feba700d94e176b3dedf2af75"} Sep 30 12:22:52 crc kubenswrapper[5002]: I0930 12:22:52.585239 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-dnmjr" event={"ID":"e10da470-8dec-47bc-8d4b-0cbc5362aea0","Type":"ContainerStarted","Data":"8f037a442b30fd3710089a3b6fb55a3c7527636a4d149df7f07ebd45abba0477"} Sep 30 12:22:52 crc kubenswrapper[5002]: I0930 12:22:52.586200 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 12:22:52 crc kubenswrapper[5002]: E0930 12:22:52.586529 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 12:22:53.086507825 +0000 UTC m=+147.336189971 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 12:22:52 crc kubenswrapper[5002]: I0930 12:22:52.586714 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zfcl9\" (UID: \"048223bb-4ff9-41e0-9b78-457a720ff399\") " pod="openshift-image-registry/image-registry-697d97f7c8-zfcl9" Sep 30 12:22:52 crc kubenswrapper[5002]: E0930 12:22:52.587676 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 12:22:53.087669127 +0000 UTC m=+147.337351273 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zfcl9" (UID: "048223bb-4ff9-41e0-9b78-457a720ff399") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 12:22:52 crc kubenswrapper[5002]: I0930 12:22:52.601719 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-hc84k" podStartSLOduration=122.601702181 podStartE2EDuration="2m2.601702181s" podCreationTimestamp="2025-09-30 12:20:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 12:22:52.598930443 +0000 UTC m=+146.848612599" watchObservedRunningTime="2025-09-30 12:22:52.601702181 +0000 UTC m=+146.851384327" Sep 30 12:22:52 crc kubenswrapper[5002]: I0930 12:22:52.609245 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-57t2m" Sep 30 12:22:52 crc kubenswrapper[5002]: I0930 12:22:52.612735 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-n5jzk" event={"ID":"b91e38cc-c852-407d-8efd-227f0bfaa5fb","Type":"ContainerStarted","Data":"ab3d31b1199757bffa455cd8fd46c35048ab96383e132e6d78fa4e192461b8e0"} Sep 30 12:22:52 crc kubenswrapper[5002]: I0930 12:22:52.612784 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-n5jzk" event={"ID":"b91e38cc-c852-407d-8efd-227f0bfaa5fb","Type":"ContainerStarted","Data":"bf9779f32705f799f5e83211b5d3da0c279067b1a1a4ca1c3a1cc9e8ef3d967d"} Sep 30 12:22:52 crc kubenswrapper[5002]: I0930 12:22:52.622296 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-9kzms" 
event={"ID":"07d72c87-0749-4f19-b53f-7733b07cc149","Type":"ContainerStarted","Data":"b484e0bd45ef14e5680295a73bc297c16e9a152f184e2a9658699e522dc5cfc4"} Sep 30 12:22:52 crc kubenswrapper[5002]: I0930 12:22:52.622818 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-9kzms" Sep 30 12:22:52 crc kubenswrapper[5002]: I0930 12:22:52.634164 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-57t2m" podStartSLOduration=122.634146121 podStartE2EDuration="2m2.634146121s" podCreationTimestamp="2025-09-30 12:20:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 12:22:52.617205686 +0000 UTC m=+146.866887852" watchObservedRunningTime="2025-09-30 12:22:52.634146121 +0000 UTC m=+146.883828267" Sep 30 12:22:52 crc kubenswrapper[5002]: I0930 12:22:52.634978 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-fp74c" event={"ID":"ccd47538-6f91-4c6d-91b0-afccf0c83b20","Type":"ContainerStarted","Data":"fa0b52a15d563dc3d46163e5ef9fd8f95e9536316e4e81429337b0e46486df26"} Sep 30 12:22:52 crc kubenswrapper[5002]: I0930 12:22:52.635014 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-fp74c" event={"ID":"ccd47538-6f91-4c6d-91b0-afccf0c83b20","Type":"ContainerStarted","Data":"eb3040e9eb8ecddc5118e1c29992329d0d6462fec27d09715cc03f712c19de2c"} Sep 30 12:22:52 crc kubenswrapper[5002]: I0930 12:22:52.659596 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-wqhm8" event={"ID":"c53fef56-513a-439c-a05a-cda26f9e3855","Type":"ContainerStarted","Data":"b1c040ffee2392b00aa630ffb81a5faf92335736676aecbf6b3f82b7b05bc55e"} Sep 30 12:22:52 crc kubenswrapper[5002]: I0930 12:22:52.659873 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-wqhm8" event={"ID":"c53fef56-513a-439c-a05a-cda26f9e3855","Type":"ContainerStarted","Data":"84bfc8343df3ee2eb4e652bcd09a84b1f69edce74ee630ff39a838c5c1915ca2"} Sep 30 12:22:52 crc kubenswrapper[5002]: I0930 12:22:52.661080 5002 patch_prober.go:28] interesting pod/downloads-7954f5f757-tdxkp container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.11:8080/\": dial tcp 10.217.0.11:8080: connect: connection refused" start-of-body= Sep 30 12:22:52 crc kubenswrapper[5002]: I0930 12:22:52.661112 5002 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-tdxkp" podUID="54790eef-93bc-4d88-bcc5-3a2f6e1c1fa5" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.11:8080/\": dial tcp 10.217.0.11:8080: connect: connection refused" Sep 30 12:22:52 crc kubenswrapper[5002]: I0930 12:22:52.664914 5002 patch_prober.go:28] interesting pod/olm-operator-6b444d44fb-9kzms container/olm-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.23:8443/healthz\": dial tcp 10.217.0.23:8443: connect: connection refused" start-of-body= Sep 30 12:22:52 crc kubenswrapper[5002]: I0930 12:22:52.664966 5002 prober.go:107] "Probe failed" probeType="Readiness" 
pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-9kzms" podUID="07d72c87-0749-4f19-b53f-7733b07cc149" containerName="olm-operator" probeResult="failure" output="Get \"https://10.217.0.23:8443/healthz\": dial tcp 10.217.0.23:8443: connect: connection refused" Sep 30 12:22:52 crc kubenswrapper[5002]: I0930 12:22:52.666788 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console-operator/console-operator-58897d9998-8m9sz" Sep 30 12:22:52 crc kubenswrapper[5002]: I0930 12:22:52.667758 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-n5jzk" podStartSLOduration=123.667746323 podStartE2EDuration="2m3.667746323s" podCreationTimestamp="2025-09-30 12:20:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 12:22:52.666532739 +0000 UTC m=+146.916214895" watchObservedRunningTime="2025-09-30 12:22:52.667746323 +0000 UTC m=+146.917428469" Sep 30 12:22:52 crc kubenswrapper[5002]: I0930 12:22:52.668307 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-apiserver/apiserver-76f77b778f-z66g4" Sep 30 12:22:52 crc kubenswrapper[5002]: I0930 12:22:52.687746 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 12:22:52 crc kubenswrapper[5002]: E0930 12:22:52.688070 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 12:22:53.188048993 +0000 UTC m=+147.437731139 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 12:22:52 crc kubenswrapper[5002]: I0930 12:22:52.688366 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zfcl9\" (UID: \"048223bb-4ff9-41e0-9b78-457a720ff399\") " pod="openshift-image-registry/image-registry-697d97f7c8-zfcl9" Sep 30 12:22:52 crc kubenswrapper[5002]: E0930 12:22:52.690063 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 12:22:53.190049099 +0000 UTC m=+147.439731245 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zfcl9" (UID: "048223bb-4ff9-41e0-9b78-457a720ff399") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 12:22:52 crc kubenswrapper[5002]: I0930 12:22:52.695503 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-dnmjr" podStartSLOduration=122.695475911 podStartE2EDuration="2m2.695475911s" podCreationTimestamp="2025-09-30 12:20:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 12:22:52.693843336 +0000 UTC m=+146.943525472" watchObservedRunningTime="2025-09-30 12:22:52.695475911 +0000 UTC m=+146.945158047" Sep 30 12:22:52 crc kubenswrapper[5002]: I0930 12:22:52.789411 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 12:22:52 crc kubenswrapper[5002]: E0930 12:22:52.789605 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 12:22:53.289580451 +0000 UTC m=+147.539262597 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 12:22:52 crc kubenswrapper[5002]: I0930 12:22:52.790169 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zfcl9\" (UID: \"048223bb-4ff9-41e0-9b78-457a720ff399\") " pod="openshift-image-registry/image-registry-697d97f7c8-zfcl9" Sep 30 12:22:52 crc kubenswrapper[5002]: E0930 12:22:52.790428 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 12:22:53.290416875 +0000 UTC m=+147.540099021 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zfcl9" (UID: "048223bb-4ff9-41e0-9b78-457a720ff399") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 12:22:52 crc kubenswrapper[5002]: I0930 12:22:52.808589 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-wqhm8" podStartSLOduration=122.808568574 podStartE2EDuration="2m2.808568574s" podCreationTimestamp="2025-09-30 12:20:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 12:22:52.80488542 +0000 UTC m=+147.054567586" watchObservedRunningTime="2025-09-30 12:22:52.808568574 +0000 UTC m=+147.058250730" Sep 30 12:22:52 crc kubenswrapper[5002]: I0930 12:22:52.809460 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/machine-api-operator-5694c8668f-fp74c" podStartSLOduration=122.809455659 podStartE2EDuration="2m2.809455659s" podCreationTimestamp="2025-09-30 12:20:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 12:22:52.784247592 +0000 UTC m=+147.033929738" watchObservedRunningTime="2025-09-30 12:22:52.809455659 +0000 UTC m=+147.059137805" Sep 30 12:22:52 crc kubenswrapper[5002]: I0930 12:22:52.860558 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-9kzms" podStartSLOduration=122.860541372 podStartE2EDuration="2m2.860541372s" podCreationTimestamp="2025-09-30 12:20:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 12:22:52.859724448 +0000 UTC m=+147.109406604" watchObservedRunningTime="2025-09-30 12:22:52.860541372 +0000 UTC m=+147.110223518" Sep 30 12:22:52 crc kubenswrapper[5002]: I0930 12:22:52.890735 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 12:22:52 crc kubenswrapper[5002]: E0930 12:22:52.890925 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 12:22:53.390901904 +0000 UTC m=+147.640584050 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 12:22:52 crc kubenswrapper[5002]: I0930 12:22:52.891291 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zfcl9\" (UID: \"048223bb-4ff9-41e0-9b78-457a720ff399\") " pod="openshift-image-registry/image-registry-697d97f7c8-zfcl9" Sep 30 12:22:52 crc kubenswrapper[5002]: E0930 12:22:52.891665 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 12:22:53.391653914 +0000 UTC m=+147.641336060 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zfcl9" (UID: "048223bb-4ff9-41e0-9b78-457a720ff399") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 12:22:52 crc kubenswrapper[5002]: I0930 12:22:52.963656 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-558db77b4-wk84c" Sep 30 12:22:52 crc kubenswrapper[5002]: I0930 12:22:52.992807 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 12:22:52 crc kubenswrapper[5002]: E0930 12:22:52.993139 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 12:22:53.493110621 +0000 UTC m=+147.742792767 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 12:22:53 crc kubenswrapper[5002]: I0930 12:22:53.017511 5002 patch_prober.go:28] interesting pod/router-default-5444994796-6rw5f container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Sep 30 12:22:53 crc kubenswrapper[5002]: [-]has-synced failed: reason withheld Sep 30 12:22:53 crc kubenswrapper[5002]: [+]process-running ok Sep 30 12:22:53 crc kubenswrapper[5002]: healthz check failed Sep 30 12:22:53 crc kubenswrapper[5002]: I0930 12:22:53.017561 5002 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-6rw5f" podUID="834344ed-4a8b-4a1e-be84-33622d21d0af" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Sep 30 12:22:53 crc kubenswrapper[5002]: I0930 12:22:53.094215 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zfcl9\" (UID: \"048223bb-4ff9-41e0-9b78-457a720ff399\") " pod="openshift-image-registry/image-registry-697d97f7c8-zfcl9" Sep 30 12:22:53 crc kubenswrapper[5002]: E0930 12:22:53.094610 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 12:22:53.594597967 +0000 UTC m=+147.844280113 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zfcl9" (UID: "048223bb-4ff9-41e0-9b78-457a720ff399") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 12:22:53 crc kubenswrapper[5002]: I0930 12:22:53.195934 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 12:22:53 crc kubenswrapper[5002]: E0930 12:22:53.196094 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 12:22:53.696076294 +0000 UTC m=+147.945758440 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 12:22:53 crc kubenswrapper[5002]: I0930 12:22:53.196191 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zfcl9\" (UID: \"048223bb-4ff9-41e0-9b78-457a720ff399\") " pod="openshift-image-registry/image-registry-697d97f7c8-zfcl9"
Sep 30 12:22:53 crc kubenswrapper[5002]: E0930 12:22:53.196439 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 12:22:53.696418883 +0000 UTC m=+147.946101029 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zfcl9" (UID: "048223bb-4ff9-41e0-9b78-457a720ff399") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 12:22:53 crc kubenswrapper[5002]: I0930 12:22:53.220705 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-pzv7v"
Sep 30 12:22:53 crc kubenswrapper[5002]: I0930 12:22:53.220744 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-pzv7v"
Sep 30 12:22:53 crc kubenswrapper[5002]: I0930 12:22:53.222961 5002 patch_prober.go:28] interesting pod/apiserver-7bbb656c7d-pzv7v container/oauth-apiserver namespace/openshift-oauth-apiserver: Startup probe status=failure output="Get \"https://10.217.0.18:8443/livez\": dial tcp 10.217.0.18:8443: connect: connection refused" start-of-body=
Sep 30 12:22:53 crc kubenswrapper[5002]: I0930 12:22:53.223020 5002 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-pzv7v" podUID="b9be5683-0681-4c81-a534-a5f997ccde65" containerName="oauth-apiserver" probeResult="failure" output="Get \"https://10.217.0.18:8443/livez\": dial tcp 10.217.0.18:8443: connect: connection refused"
Sep 30 12:22:53 crc kubenswrapper[5002]: I0930 12:22:53.297648 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 30 12:22:53 crc kubenswrapper[5002]: E0930 12:22:53.298068 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 12:22:53.798047163 +0000 UTC m=+148.047729319 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 12:22:53 crc kubenswrapper[5002]: I0930 12:22:53.399060 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zfcl9\" (UID: \"048223bb-4ff9-41e0-9b78-457a720ff399\") " pod="openshift-image-registry/image-registry-697d97f7c8-zfcl9"
Sep 30 12:22:53 crc kubenswrapper[5002]: E0930 12:22:53.399405 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 12:22:53.899393037 +0000 UTC m=+148.149075183 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zfcl9" (UID: "048223bb-4ff9-41e0-9b78-457a720ff399") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 12:22:53 crc kubenswrapper[5002]: I0930 12:22:53.408790 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-sczzw"
Sep 30 12:22:53 crc kubenswrapper[5002]: I0930 12:22:53.499751 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 30 12:22:53 crc kubenswrapper[5002]: E0930 12:22:53.499901 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 12:22:53.999878466 +0000 UTC m=+148.249560622 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 12:22:53 crc kubenswrapper[5002]: I0930 12:22:53.500270 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zfcl9\" (UID: \"048223bb-4ff9-41e0-9b78-457a720ff399\") " pod="openshift-image-registry/image-registry-697d97f7c8-zfcl9"
Sep 30 12:22:53 crc kubenswrapper[5002]: E0930 12:22:53.500645 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 12:22:54.000634887 +0000 UTC m=+148.250317043 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zfcl9" (UID: "048223bb-4ff9-41e0-9b78-457a720ff399") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 12:22:53 crc kubenswrapper[5002]: I0930 12:22:53.601154 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 30 12:22:53 crc kubenswrapper[5002]: E0930 12:22:53.601328 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 12:22:54.10130053 +0000 UTC m=+148.350982676 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 12:22:53 crc kubenswrapper[5002]: I0930 12:22:53.601396 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zfcl9\" (UID: \"048223bb-4ff9-41e0-9b78-457a720ff399\") " pod="openshift-image-registry/image-registry-697d97f7c8-zfcl9"
Sep 30 12:22:53 crc kubenswrapper[5002]: E0930 12:22:53.601727 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 12:22:54.101716382 +0000 UTC m=+148.351398528 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zfcl9" (UID: "048223bb-4ff9-41e0-9b78-457a720ff399") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 12:22:53 crc kubenswrapper[5002]: I0930 12:22:53.646400 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-4pvsr"
Sep 30 12:22:53 crc kubenswrapper[5002]: I0930 12:22:53.664753 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-l9vch" event={"ID":"89d8a136-01a4-487f-8160-9a0cce584cec","Type":"ContainerStarted","Data":"0ddc383712bada9d5b879ba7a7e62549f7f5eaa667ac5ead5e39f0f780cab111"}
Sep 30 12:22:53 crc kubenswrapper[5002]: I0930 12:22:53.664805 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-l9vch" event={"ID":"89d8a136-01a4-487f-8160-9a0cce584cec","Type":"ContainerStarted","Data":"68364d664059b97c00856193b82fcbc723acf8a2502995ed56479db39b4cdad9"}
Sep 30 12:22:53 crc kubenswrapper[5002]: I0930 12:22:53.665947 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-v6kxk" event={"ID":"bae0075d-69e4-48a2-91f2-a6e2df1529f0","Type":"ContainerStarted","Data":"b5a66aac0f46ca029cb34dc3542b17ceccbdbeaf8a2d8c33522c8fcfb213c572"}
Sep 30 12:22:53 crc kubenswrapper[5002]: I0930 12:22:53.668052 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-n5jzk" event={"ID":"b91e38cc-c852-407d-8efd-227f0bfaa5fb","Type":"ContainerStarted","Data":"025dd568547d4cfc30e39acf7d8ceccea260422a8529785f34300cb7f6a078ac"}
Sep 30 12:22:53 crc kubenswrapper[5002]: I0930 12:22:53.669525 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-wtzqd" event={"ID":"846fc575-9b63-46b3-bd9d-dfc8ab0bf7a2","Type":"ContainerStarted","Data":"7645989a8e6b0f6f477724acd6ca831ffd6d7fa308e5b6e71c78a89004f1a914"}
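
The two interleaved failures above are one retry loop seen from both sides: the kubelet can neither tear down the volume for the terminated pod 8f668bae-612b-4b75-9490-919e737c6a3b nor stage it for the replacement image-registry pod, because both paths first resolve the driver name against the kubelet's registry of CSI plugins, and kubevirt.io.hostpath-provisioner has not registered yet. A minimal Go sketch of that gate, assuming a plain name-to-endpoint map (the csiDriverRegistry type and clientFor helper are illustrative, not kubelet source):

package main

import (
	"fmt"
	"sync"
)

// csiDriverRegistry mimics the kubelet-side map of registered CSI plugins,
// keyed by driver name; an entry appears only after the driver's
// registration socket has been processed (illustrative, not kubelet code).
type csiDriverRegistry struct {
	mu      sync.RWMutex
	drivers map[string]string // driver name -> endpoint (csi.sock path)
}

func (r *csiDriverRegistry) clientFor(name string) (string, error) {
	r.mu.RLock()
	defer r.mu.RUnlock()
	ep, ok := r.drivers[name]
	if !ok {
		// The condition the log keeps reporting until 12:22:55.
		return "", fmt.Errorf("driver name %s not found in the list of registered CSI drivers", name)
	}
	return ep, nil
}

func main() {
	reg := &csiDriverRegistry{drivers: map[string]string{}}
	if _, err := reg.clientFor("kubevirt.io.hostpath-provisioner"); err != nil {
		fmt.Println("Error:", err) // mount/unmount fails; retried after 500ms
	}
	// Once the plugin registers (as at 12:22:55.816852), lookups succeed.
	reg.mu.Lock()
	reg.drivers["kubevirt.io.hostpath-provisioner"] = "/var/lib/kubelet/plugins/csi-hostpath/csi.sock"
	reg.mu.Unlock()
	if ep, err := reg.clientFor("kubevirt.io.hostpath-provisioner"); err == nil {
		fmt.Println("resolved endpoint:", ep)
	}
}
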
event={"ID":"846fc575-9b63-46b3-bd9d-dfc8ab0bf7a2","Type":"ContainerStarted","Data":"7645989a8e6b0f6f477724acd6ca831ffd6d7fa308e5b6e71c78a89004f1a914"} Sep 30 12:22:53 crc kubenswrapper[5002]: I0930 12:22:53.671079 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-djckq" event={"ID":"1fa6d0bc-ffc5-4974-bb50-123dd96187bd","Type":"ContainerStarted","Data":"e86bdfaab5190d99d700f88bbf7b91525cccfc258fc5121652a214d4a2349b79"} Sep 30 12:22:53 crc kubenswrapper[5002]: I0930 12:22:53.672329 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-h52ns" event={"ID":"cae8a060-bc60-476a-9bee-11637282a23f","Type":"ContainerStarted","Data":"11234afd0b14ab3f72eab5d1aceeb6ed9d755af926d0a53cf239ab913f8f6098"} Sep 30 12:22:53 crc kubenswrapper[5002]: I0930 12:22:53.672484 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-dns/dns-default-h52ns" Sep 30 12:22:53 crc kubenswrapper[5002]: I0930 12:22:53.673667 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-znwgl" event={"ID":"2c6f790c-415f-46c9-baba-251cdc1e14b2","Type":"ContainerStarted","Data":"d41f83dd7de4654b57c1933571f7350c40e4f6cbfed5617ed53320c558e1374b"} Sep 30 12:22:53 crc kubenswrapper[5002]: I0930 12:22:53.674894 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-9kzms" event={"ID":"07d72c87-0749-4f19-b53f-7733b07cc149","Type":"ContainerStarted","Data":"0d47cf9336bce31775534137251af7f09067717780740134a9c05fde95324059"} Sep 30 12:22:53 crc kubenswrapper[5002]: I0930 12:22:53.675595 5002 patch_prober.go:28] interesting pod/olm-operator-6b444d44fb-9kzms container/olm-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.23:8443/healthz\": dial tcp 10.217.0.23:8443: connect: connection refused" start-of-body= Sep 30 12:22:53 crc kubenswrapper[5002]: I0930 12:22:53.675660 5002 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-9kzms" podUID="07d72c87-0749-4f19-b53f-7733b07cc149" containerName="olm-operator" probeResult="failure" output="Get \"https://10.217.0.23:8443/healthz\": dial tcp 10.217.0.23:8443: connect: connection refused" Sep 30 12:22:53 crc kubenswrapper[5002]: I0930 12:22:53.676703 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-2lfhz" event={"ID":"fef68bfa-20e4-4a83-8642-be28f0f0b31a","Type":"ContainerStarted","Data":"46804894410dbcda6483808021e76e613340467ffcc815f5b143ddfb6b65a00d"} Sep 30 12:22:53 crc kubenswrapper[5002]: I0930 12:22:53.678127 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-fp74c" event={"ID":"ccd47538-6f91-4c6d-91b0-afccf0c83b20","Type":"ContainerStarted","Data":"63e875720458d920cbb4e3e5ae309e772ca745da6bfa30a0026ff78ee47ce9a6"} Sep 30 12:22:53 crc kubenswrapper[5002]: I0930 12:22:53.681409 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-88xml" event={"ID":"b282f088-14fa-47c0-a8a8-1285a8a7c296","Type":"ContainerStarted","Data":"9668c12f118fa07084e9d52f5f029bd150fff85af2f6bfe7af04b839499acf88"} Sep 30 12:22:53 crc kubenswrapper[5002]: I0930 12:22:53.681437 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-multus/multus-admission-controller-857f4d67dd-88xml" event={"ID":"b282f088-14fa-47c0-a8a8-1285a8a7c296","Type":"ContainerStarted","Data":"7b40a463844cdaad8becc247ab3d7bc69a926647a89d33dba8e4e40237e2a4c5"} Sep 30 12:22:53 crc kubenswrapper[5002]: I0930 12:22:53.682682 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-k6j67" event={"ID":"5b1374f1-9722-4483-b976-775bdd8cb65a","Type":"ContainerStarted","Data":"5401977626bd610848d0cdac1f2945aa690218cd4e801631331e5cfa0fdff221"} Sep 30 12:22:53 crc kubenswrapper[5002]: I0930 12:22:53.683845 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-6w5w8" event={"ID":"364932b8-9721-41da-a1a3-7b9a1977da84","Type":"ContainerStarted","Data":"49bf19869ba423b5108fc9303eef64a51defc3cc31fcb3f604276f5e6bb5f86e"} Sep 30 12:22:53 crc kubenswrapper[5002]: I0930 12:22:53.685201 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-dgbs4" event={"ID":"5f1cdbf0-65a0-4621-a4de-2337b8151bd3","Type":"ContainerStarted","Data":"ff2abf7255e01dc2655d5eb76845882a623c2118302d3bbf9f99742d1587ba44"} Sep 30 12:22:53 crc kubenswrapper[5002]: I0930 12:22:53.685240 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-dgbs4" event={"ID":"5f1cdbf0-65a0-4621-a4de-2337b8151bd3","Type":"ContainerStarted","Data":"23ca23c83f6e11b95dec17fc4d762d78f5af96a90d763c517c75951df2086bef"} Sep 30 12:22:53 crc kubenswrapper[5002]: I0930 12:22:53.686062 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-hc84k" event={"ID":"acdc25a7-3353-430f-b856-22a1259025ee","Type":"ContainerStarted","Data":"8c0fd80db2f419455eeefa0ccffbd4839f4c68065259cdc4c07a58d4e26d5d31"} Sep 30 12:22:53 crc kubenswrapper[5002]: I0930 12:22:53.686734 5002 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-hc84k container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.34:8080/healthz\": dial tcp 10.217.0.34:8080: connect: connection refused" start-of-body= Sep 30 12:22:53 crc kubenswrapper[5002]: I0930 12:22:53.686769 5002 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-hc84k" podUID="acdc25a7-3353-430f-b856-22a1259025ee" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.34:8080/healthz\": dial tcp 10.217.0.34:8080: connect: connection refused" Sep 30 12:22:53 crc kubenswrapper[5002]: I0930 12:22:53.688002 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-2jxnc" event={"ID":"f1a5276b-5811-4a19-8d31-a46a1c787d8a","Type":"ContainerStarted","Data":"9c3226d6ba1e43c99a95edce2981d867a5817b242aa6a4f386d5cf5a75f2c5f7"} Sep 30 12:22:53 crc kubenswrapper[5002]: I0930 12:22:53.688070 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-2jxnc" event={"ID":"f1a5276b-5811-4a19-8d31-a46a1c787d8a","Type":"ContainerStarted","Data":"6259ac74c7cb6a95fe25c5bc27b12ae2a7c703040a66f34d051b14d08d6e7151"} Sep 30 12:22:53 crc kubenswrapper[5002]: I0930 12:22:53.688089 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-2jxnc" Sep 30 12:22:53 crc kubenswrapper[5002]: I0930 12:22:53.689124 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29320575-h29rv" event={"ID":"0335170b-46ee-4cd3-aae5-694623192d49","Type":"ContainerStarted","Data":"84e129322ffd4f8541b7a55586bc94416e78bbd8fd3fb4407ccf393239fe2adc"} Sep 30 12:22:53 crc kubenswrapper[5002]: I0930 12:22:53.690805 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-wqhm8" event={"ID":"c53fef56-513a-439c-a05a-cda26f9e3855","Type":"ContainerStarted","Data":"f403415e7821a105e85fab07dd9e83324cd500166e182ee0bdf514d7a0c37fa1"} Sep 30 12:22:53 crc kubenswrapper[5002]: I0930 12:22:53.692911 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-hns42" event={"ID":"14158f4f-292f-4693-9ed2-c7fa97452ecb","Type":"ContainerStarted","Data":"620cdc63dd38e4fde949d257df2d1995aa32b95a61c1ae993e2d188641f32eb7"} Sep 30 12:22:53 crc kubenswrapper[5002]: I0930 12:22:53.694412 5002 patch_prober.go:28] interesting pod/downloads-7954f5f757-tdxkp container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.11:8080/\": dial tcp 10.217.0.11:8080: connect: connection refused" start-of-body= Sep 30 12:22:53 crc kubenswrapper[5002]: I0930 12:22:53.694736 5002 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-tdxkp" podUID="54790eef-93bc-4d88-bcc5-3a2f6e1c1fa5" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.11:8080/\": dial tcp 10.217.0.11:8080: connect: connection refused" Sep 30 12:22:53 crc kubenswrapper[5002]: I0930 12:22:53.694877 5002 patch_prober.go:28] interesting pod/catalog-operator-68c6474976-6tqs2 container/catalog-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.31:8443/healthz\": dial tcp 10.217.0.31:8443: connect: connection refused" start-of-body= Sep 30 12:22:53 crc kubenswrapper[5002]: I0930 12:22:53.694984 5002 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-6tqs2" podUID="796cc797-549b-40ee-8bf9-66a8d9b54703" containerName="catalog-operator" probeResult="failure" output="Get \"https://10.217.0.31:8443/healthz\": dial tcp 10.217.0.31:8443: connect: connection refused" Sep 30 12:22:53 crc kubenswrapper[5002]: I0930 12:22:53.700904 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/dns-default-h52ns" podStartSLOduration=8.700885864 podStartE2EDuration="8.700885864s" podCreationTimestamp="2025-09-30 12:22:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 12:22:53.699462544 +0000 UTC m=+147.949144690" watchObservedRunningTime="2025-09-30 12:22:53.700885864 +0000 UTC m=+147.950568000" Sep 30 12:22:53 crc kubenswrapper[5002]: I0930 12:22:53.702175 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 12:22:53 crc 
Sep 30 12:22:53 crc kubenswrapper[5002]: E0930 12:22:53.702586 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 12:22:54.202573211 +0000 UTC m=+148.452255357 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 12:22:53 crc kubenswrapper[5002]: I0930 12:22:53.719913 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-2lfhz" podStartSLOduration=123.719893777 podStartE2EDuration="2m3.719893777s" podCreationTimestamp="2025-09-30 12:20:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 12:22:53.718233061 +0000 UTC m=+147.967915207" watchObservedRunningTime="2025-09-30 12:22:53.719893777 +0000 UTC m=+147.969575923"
Sep 30 12:22:53 crc kubenswrapper[5002]: I0930 12:22:53.744930 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-djckq" podStartSLOduration=123.744911089 podStartE2EDuration="2m3.744911089s" podCreationTimestamp="2025-09-30 12:20:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 12:22:53.742904902 +0000 UTC m=+147.992587048" watchObservedRunningTime="2025-09-30 12:22:53.744911089 +0000 UTC m=+147.994593235"
Sep 30 12:22:53 crc kubenswrapper[5002]: I0930 12:22:53.764992 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca-operator/service-ca-operator-777779d784-wtzqd" podStartSLOduration=123.764975562 podStartE2EDuration="2m3.764975562s" podCreationTimestamp="2025-09-30 12:20:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 12:22:53.762165373 +0000 UTC m=+148.011847519" watchObservedRunningTime="2025-09-30 12:22:53.764975562 +0000 UTC m=+148.014657698"
Sep 30 12:22:53 crc kubenswrapper[5002]: I0930 12:22:53.783360 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-l9vch" podStartSLOduration=123.783342797 podStartE2EDuration="2m3.783342797s" podCreationTimestamp="2025-09-30 12:20:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 12:22:53.781897796 +0000 UTC m=+148.031579942" watchObservedRunningTime="2025-09-30 12:22:53.783342797 +0000 UTC m=+148.033024943"
Sep 30 12:22:53 crc kubenswrapper[5002]: I0930 12:22:53.804203 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zfcl9\" (UID: \"048223bb-4ff9-41e0-9b78-457a720ff399\") " pod="openshift-image-registry/image-registry-697d97f7c8-zfcl9"
Sep 30 12:22:53 crc kubenswrapper[5002]: E0930 12:22:53.807282 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 12:22:54.307264088 +0000 UTC m=+148.556946334 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zfcl9" (UID: "048223bb-4ff9-41e0-9b78-457a720ff399") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 12:22:53 crc kubenswrapper[5002]: I0930 12:22:53.843402 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-2jxnc" podStartSLOduration=123.843384961 podStartE2EDuration="2m3.843384961s" podCreationTimestamp="2025-09-30 12:20:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 12:22:53.843123284 +0000 UTC m=+148.092805430" watchObservedRunningTime="2025-09-30 12:22:53.843384961 +0000 UTC m=+148.093067107"
Sep 30 12:22:53 crc kubenswrapper[5002]: I0930 12:22:53.845108 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-admission-controller-857f4d67dd-88xml" podStartSLOduration=123.845101569 podStartE2EDuration="2m3.845101569s" podCreationTimestamp="2025-09-30 12:20:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 12:22:53.81410691 +0000 UTC m=+148.063789066" watchObservedRunningTime="2025-09-30 12:22:53.845101569 +0000 UTC m=+148.094783715"
Sep 30 12:22:53 crc kubenswrapper[5002]: I0930 12:22:53.860970 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-config-operator/openshift-config-operator-7777fb866f-hns42" podStartSLOduration=124.860952844 podStartE2EDuration="2m4.860952844s" podCreationTimestamp="2025-09-30 12:20:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 12:22:53.85939434 +0000 UTC m=+148.109076486" watchObservedRunningTime="2025-09-30 12:22:53.860952844 +0000 UTC m=+148.110634990"
Sep 30 12:22:53 crc kubenswrapper[5002]: I0930 12:22:53.878415 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-k6j67" podStartSLOduration=123.878400773 podStartE2EDuration="2m3.878400773s" podCreationTimestamp="2025-09-30 12:20:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 12:22:53.876613023 +0000 UTC m=+148.126295179" watchObservedRunningTime="2025-09-30 12:22:53.878400773 +0000 UTC m=+148.128082919"
Sep 30 12:22:53 crc kubenswrapper[5002]: I0930 12:22:53.906226 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 30 12:22:53 crc kubenswrapper[5002]: E0930 12:22:53.906555 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 12:22:54.406540882 +0000 UTC m=+148.656223028 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 12:22:53 crc kubenswrapper[5002]: I0930 12:22:53.914740 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns-operator/dns-operator-744455d44c-dgbs4" podStartSLOduration=123.914708042 podStartE2EDuration="2m3.914708042s" podCreationTimestamp="2025-09-30 12:20:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 12:22:53.914701162 +0000 UTC m=+148.164383308" watchObservedRunningTime="2025-09-30 12:22:53.914708042 +0000 UTC m=+148.164390188"
Sep 30 12:22:54 crc kubenswrapper[5002]: I0930 12:22:54.007730 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zfcl9\" (UID: \"048223bb-4ff9-41e0-9b78-457a720ff399\") " pod="openshift-image-registry/image-registry-697d97f7c8-zfcl9"
Sep 30 12:22:54 crc kubenswrapper[5002]: E0930 12:22:54.008136 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 12:22:54.508117142 +0000 UTC m=+148.757799378 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zfcl9" (UID: "048223bb-4ff9-41e0-9b78-457a720ff399") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 12:22:54 crc kubenswrapper[5002]: I0930 12:22:54.009955 5002 patch_prober.go:28] interesting pod/router-default-5444994796-6rw5f container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Sep 30 12:22:54 crc kubenswrapper[5002]: [-]has-synced failed: reason withheld
Sep 30 12:22:54 crc kubenswrapper[5002]: [+]process-running ok
Sep 30 12:22:54 crc kubenswrapper[5002]: healthz check failed
Sep 30 12:22:54 crc kubenswrapper[5002]: I0930 12:22:54.010006 5002 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-6rw5f" podUID="834344ed-4a8b-4a1e-be84-33622d21d0af" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Sep 30 12:22:54 crc kubenswrapper[5002]: I0930 12:22:54.108671 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 30 12:22:54 crc kubenswrapper[5002]: E0930 12:22:54.108977 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 12:22:54.608962321 +0000 UTC m=+148.858644467 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 12:22:54 crc kubenswrapper[5002]: I0930 12:22:54.209906 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zfcl9\" (UID: \"048223bb-4ff9-41e0-9b78-457a720ff399\") " pod="openshift-image-registry/image-registry-697d97f7c8-zfcl9"
Sep 30 12:22:54 crc kubenswrapper[5002]: E0930 12:22:54.210250 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 12:22:54.710232421 +0000 UTC m=+148.959914557 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zfcl9" (UID: "048223bb-4ff9-41e0-9b78-457a720ff399") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 12:22:54 crc kubenswrapper[5002]: I0930 12:22:54.311330 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 30 12:22:54 crc kubenswrapper[5002]: E0930 12:22:54.311523 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 12:22:54.811497912 +0000 UTC m=+149.061180058 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 12:22:54 crc kubenswrapper[5002]: I0930 12:22:54.311738 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zfcl9\" (UID: \"048223bb-4ff9-41e0-9b78-457a720ff399\") " pod="openshift-image-registry/image-registry-697d97f7c8-zfcl9"
Sep 30 12:22:54 crc kubenswrapper[5002]: E0930 12:22:54.312054 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 12:22:54.812039787 +0000 UTC m=+149.061721933 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zfcl9" (UID: "048223bb-4ff9-41e0-9b78-457a720ff399") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 12:22:54 crc kubenswrapper[5002]: I0930 12:22:54.412625 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 30 12:22:54 crc kubenswrapper[5002]: E0930 12:22:54.413016 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 12:22:54.913001839 +0000 UTC m=+149.162683985 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 12:22:54 crc kubenswrapper[5002]: I0930 12:22:54.514167 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zfcl9\" (UID: \"048223bb-4ff9-41e0-9b78-457a720ff399\") " pod="openshift-image-registry/image-registry-697d97f7c8-zfcl9"
Sep 30 12:22:54 crc kubenswrapper[5002]: E0930 12:22:54.514432 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 12:22:55.014420584 +0000 UTC m=+149.264102730 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zfcl9" (UID: "048223bb-4ff9-41e0-9b78-457a720ff399") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 12:22:54 crc kubenswrapper[5002]: I0930 12:22:54.615176 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 30 12:22:54 crc kubenswrapper[5002]: E0930 12:22:54.615472 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 12:22:55.115455008 +0000 UTC m=+149.365137154 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 12:22:54 crc kubenswrapper[5002]: I0930 12:22:54.701465 5002 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-hc84k container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.34:8080/healthz\": dial tcp 10.217.0.34:8080: connect: connection refused" start-of-body=
Sep 30 12:22:54 crc kubenswrapper[5002]: I0930 12:22:54.701783 5002 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-hc84k" podUID="acdc25a7-3353-430f-b856-22a1259025ee" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.34:8080/healthz\": dial tcp 10.217.0.34:8080: connect: connection refused"
Sep 30 12:22:54 crc kubenswrapper[5002]: I0930 12:22:54.705451 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-config-operator/openshift-config-operator-7777fb866f-hns42"
Sep 30 12:22:54 crc kubenswrapper[5002]: I0930 12:22:54.710106 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-9kzms"
Sep 30 12:22:54 crc kubenswrapper[5002]: I0930 12:22:54.716778 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zfcl9\" (UID: \"048223bb-4ff9-41e0-9b78-457a720ff399\") " pod="openshift-image-registry/image-registry-697d97f7c8-zfcl9"
Sep 30 12:22:54 crc kubenswrapper[5002]: E0930 12:22:54.717150 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 12:22:55.21713665 +0000 UTC m=+149.466818796 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zfcl9" (UID: "048223bb-4ff9-41e0-9b78-457a720ff399") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
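
The router's startup probe above differs from the connection-refused cases: the server is up but its aggregated healthz endpoint returns 500, listing each sub-check as [+] ok or [-] failed. A sketch of an aggregated handler that reproduces the output shape quoted in the log (the check names and "reason withheld" wording follow the log; the handler itself is illustrative, not the router's source):

package main

import (
	"fmt"
	"log"
	"net/http"
	"strings"
)

type check struct {
	name string
	fn   func() error
}

// healthzHandler renders one "[+]name ok" / "[-]name failed: reason withheld"
// line per check, appends "healthz check failed", and returns 500 when any
// check fails, matching the probe body in the log.
func healthzHandler(checks []check) http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		var b strings.Builder
		failed := false
		for _, c := range checks {
			if err := c.fn(); err != nil {
				failed = true
				fmt.Fprintf(&b, "[-]%s failed: reason withheld\n", c.name)
			} else {
				fmt.Fprintf(&b, "[+]%s ok\n", c.name)
			}
		}
		if failed {
			b.WriteString("healthz check failed")
			http.Error(w, b.String(), http.StatusInternalServerError)
			return
		}
		fmt.Fprint(w, b.String())
	}
}

func main() {
	checks := []check{
		{"backend-http", func() error { return fmt.Errorf("not ready") }},
		{"has-synced", func() error { return fmt.Errorf("not synced") }},
		{"process-running", func() error { return nil }},
	}
	http.Handle("/healthz", healthzHandler(checks))
	log.Fatal(http.ListenAndServe(":8080", nil)) // a startup probe GET now sees statuscode 500
}
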
nodeName:}" failed. No retries permitted until 2025-09-30 12:22:55.21713665 +0000 UTC m=+149.466818796 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zfcl9" (UID: "048223bb-4ff9-41e0-9b78-457a720ff399") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 12:22:54 crc kubenswrapper[5002]: I0930 12:22:54.817614 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 12:22:54 crc kubenswrapper[5002]: E0930 12:22:54.817794 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 12:22:55.317767363 +0000 UTC m=+149.567449509 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 12:22:54 crc kubenswrapper[5002]: I0930 12:22:54.818195 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zfcl9\" (UID: \"048223bb-4ff9-41e0-9b78-457a720ff399\") " pod="openshift-image-registry/image-registry-697d97f7c8-zfcl9" Sep 30 12:22:54 crc kubenswrapper[5002]: I0930 12:22:54.818560 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 12:22:54 crc kubenswrapper[5002]: I0930 12:22:54.818725 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 12:22:54 crc kubenswrapper[5002]: I0930 12:22:54.818901 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 12:22:54 crc kubenswrapper[5002]: I0930 12:22:54.818932 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 12:22:54 crc kubenswrapper[5002]: E0930 12:22:54.819492 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 12:22:55.319466631 +0000 UTC m=+149.569148777 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zfcl9" (UID: "048223bb-4ff9-41e0-9b78-457a720ff399") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 12:22:54 crc kubenswrapper[5002]: I0930 12:22:54.842290 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 12:22:54 crc kubenswrapper[5002]: I0930 12:22:54.842946 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 12:22:54 crc kubenswrapper[5002]: I0930 12:22:54.843462 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 12:22:54 crc kubenswrapper[5002]: I0930 12:22:54.852886 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 12:22:54 crc kubenswrapper[5002]: I0930 12:22:54.925962 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 12:22:54 crc kubenswrapper[5002]: E0930 12:22:54.926285 5002 
nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 12:22:55.426268836 +0000 UTC m=+149.675950982 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 12:22:55 crc kubenswrapper[5002]: I0930 12:22:55.007068 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 12:22:55 crc kubenswrapper[5002]: I0930 12:22:55.020765 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 12:22:55 crc kubenswrapper[5002]: I0930 12:22:55.021766 5002 patch_prober.go:28] interesting pod/router-default-5444994796-6rw5f container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Sep 30 12:22:55 crc kubenswrapper[5002]: [-]has-synced failed: reason withheld Sep 30 12:22:55 crc kubenswrapper[5002]: [+]process-running ok Sep 30 12:22:55 crc kubenswrapper[5002]: healthz check failed Sep 30 12:22:55 crc kubenswrapper[5002]: I0930 12:22:55.021823 5002 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-6rw5f" podUID="834344ed-4a8b-4a1e-be84-33622d21d0af" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Sep 30 12:22:55 crc kubenswrapper[5002]: I0930 12:22:55.027128 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zfcl9\" (UID: \"048223bb-4ff9-41e0-9b78-457a720ff399\") " pod="openshift-image-registry/image-registry-697d97f7c8-zfcl9" Sep 30 12:22:55 crc kubenswrapper[5002]: E0930 12:22:55.027437 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 12:22:55.527427065 +0000 UTC m=+149.777109211 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zfcl9" (UID: "048223bb-4ff9-41e0-9b78-457a720ff399") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 12:22:55 crc kubenswrapper[5002]: I0930 12:22:55.031005 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 12:22:55 crc kubenswrapper[5002]: I0930 12:22:55.128189 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 12:22:55 crc kubenswrapper[5002]: E0930 12:22:55.128596 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 12:22:55.628575422 +0000 UTC m=+149.878257568 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 12:22:55 crc kubenswrapper[5002]: I0930 12:22:55.232374 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zfcl9\" (UID: \"048223bb-4ff9-41e0-9b78-457a720ff399\") " pod="openshift-image-registry/image-registry-697d97f7c8-zfcl9" Sep 30 12:22:55 crc kubenswrapper[5002]: E0930 12:22:55.232995 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 12:22:55.73298291 +0000 UTC m=+149.982665056 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zfcl9" (UID: "048223bb-4ff9-41e0-9b78-457a720ff399") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 12:22:55 crc kubenswrapper[5002]: I0930 12:22:55.333806 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 12:22:55 crc kubenswrapper[5002]: E0930 12:22:55.334187 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 12:22:55.834173179 +0000 UTC m=+150.083855325 (durationBeforeRetry 500ms). 
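
Each failed volume operation above is rescheduled rather than retried inline: the error handler stamps a "No retries permitted until" deadline equal to the failure time plus durationBeforeRetry, and subsequent reconciler passes simply skip the operation until that deadline passes. A simplified sketch of that bookkeeping, assuming a fixed 500ms delay (the value logged here; kubelet's actual backoff can grow on repeated failures, which this sketch does not model):

package main

import (
	"fmt"
	"time"
)

// pendingOp mimics the per-volume retry gate behind the
// "(durationBeforeRetry 500ms)" messages (illustrative, not kubelet source).
type pendingOp struct {
	lastErrorTime time.Time
	backoff       time.Duration
}

func (o *pendingOp) markFailed(now time.Time) { o.lastErrorTime = now }

// mayRetry reports whether the reconciler is allowed to run the operation again.
func (o *pendingOp) mayRetry(now time.Time) bool {
	return now.After(o.lastErrorTime.Add(o.backoff))
}

func main() {
	op := &pendingOp{backoff: 500 * time.Millisecond}
	failedAt := time.Date(2025, time.September, 30, 12, 22, 55, 334187000, time.UTC)
	op.markFailed(failedAt)
	fmt.Println("no retries permitted until", failedAt.Add(op.backoff))
	fmt.Println("retry at +100ms allowed?", op.mayRetry(failedAt.Add(100*time.Millisecond))) // false
	fmt.Println("retry at +600ms allowed?", op.mayRetry(failedAt.Add(600*time.Millisecond))) // true
}
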
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 12:22:55 crc kubenswrapper[5002]: W0930 12:22:55.424155 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3b6479f0_333b_4a96_9adf_2099afdc2447.slice/crio-f171c917344ed5fb97535fbad3852a51e3e9080e680e7dd7ac36733d3cd883a2 WatchSource:0}: Error finding container f171c917344ed5fb97535fbad3852a51e3e9080e680e7dd7ac36733d3cd883a2: Status 404 returned error can't find the container with id f171c917344ed5fb97535fbad3852a51e3e9080e680e7dd7ac36733d3cd883a2 Sep 30 12:22:55 crc kubenswrapper[5002]: I0930 12:22:55.434828 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zfcl9\" (UID: \"048223bb-4ff9-41e0-9b78-457a720ff399\") " pod="openshift-image-registry/image-registry-697d97f7c8-zfcl9" Sep 30 12:22:55 crc kubenswrapper[5002]: E0930 12:22:55.435154 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 12:22:55.935142071 +0000 UTC m=+150.184824217 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zfcl9" (UID: "048223bb-4ff9-41e0-9b78-457a720ff399") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 12:22:55 crc kubenswrapper[5002]: I0930 12:22:55.535678 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 12:22:55 crc kubenswrapper[5002]: E0930 12:22:55.535935 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 12:22:56.035920738 +0000 UTC m=+150.285602884 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 12:22:55 crc kubenswrapper[5002]: W0930 12:22:55.557950 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5fe485a1_e14f_4c09_b5b9_f252bc42b7e8.slice/crio-2dc3a87000944b08651414d070a0ae0dbf3fdf60fb422a60b2fea21344b6b1ba WatchSource:0}: Error finding container 2dc3a87000944b08651414d070a0ae0dbf3fdf60fb422a60b2fea21344b6b1ba: Status 404 returned error can't find the container with id 2dc3a87000944b08651414d070a0ae0dbf3fdf60fb422a60b2fea21344b6b1ba Sep 30 12:22:55 crc kubenswrapper[5002]: I0930 12:22:55.637263 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zfcl9\" (UID: \"048223bb-4ff9-41e0-9b78-457a720ff399\") " pod="openshift-image-registry/image-registry-697d97f7c8-zfcl9" Sep 30 12:22:55 crc kubenswrapper[5002]: E0930 12:22:55.637552 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 12:22:56.137541318 +0000 UTC m=+150.387223464 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zfcl9" (UID: "048223bb-4ff9-41e0-9b78-457a720ff399") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 12:22:55 crc kubenswrapper[5002]: I0930 12:22:55.718287 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"2dc3a87000944b08651414d070a0ae0dbf3fdf60fb422a60b2fea21344b6b1ba"} Sep 30 12:22:55 crc kubenswrapper[5002]: I0930 12:22:55.720779 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"9f2585564504874d3ffd993aaeb333d42b218bbe84f4859c87cc182a87c23546"} Sep 30 12:22:55 crc kubenswrapper[5002]: I0930 12:22:55.727381 5002 generic.go:334] "Generic (PLEG): container finished" podID="0335170b-46ee-4cd3-aae5-694623192d49" containerID="84e129322ffd4f8541b7a55586bc94416e78bbd8fd3fb4407ccf393239fe2adc" exitCode=0 Sep 30 12:22:55 crc kubenswrapper[5002]: I0930 12:22:55.727434 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29320575-h29rv" event={"ID":"0335170b-46ee-4cd3-aae5-694623192d49","Type":"ContainerDied","Data":"84e129322ffd4f8541b7a55586bc94416e78bbd8fd3fb4407ccf393239fe2adc"} Sep 30 12:22:55 crc kubenswrapper[5002]: I0930 12:22:55.732904 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-znwgl" event={"ID":"2c6f790c-415f-46c9-baba-251cdc1e14b2","Type":"ContainerStarted","Data":"9e443a4ca21e953f05955f8f52d80d280f18c723e8aadf9174271a2935dc14b9"} Sep 30 12:22:55 crc kubenswrapper[5002]: I0930 12:22:55.732957 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-znwgl" event={"ID":"2c6f790c-415f-46c9-baba-251cdc1e14b2","Type":"ContainerStarted","Data":"adf4aaabb0451194dd165a0d4a5af8821e0c22e72fdd45ae0eededdbed354284"} Sep 30 12:22:55 crc kubenswrapper[5002]: I0930 12:22:55.738423 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 12:22:55 crc kubenswrapper[5002]: E0930 12:22:55.738783 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 12:22:56.238759208 +0000 UTC m=+150.488441354 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 12:22:55 crc kubenswrapper[5002]: I0930 12:22:55.742447 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"e4e2145b97eeaf32134b746ec9664341a468318bbdcdbac91a4823647a03e6df"} Sep 30 12:22:55 crc kubenswrapper[5002]: I0930 12:22:55.742508 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"f171c917344ed5fb97535fbad3852a51e3e9080e680e7dd7ac36733d3cd883a2"} Sep 30 12:22:55 crc kubenswrapper[5002]: I0930 12:22:55.744727 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 12:22:55 crc kubenswrapper[5002]: I0930 12:22:55.755374 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-config-operator/openshift-config-operator-7777fb866f-hns42" Sep 30 12:22:55 crc kubenswrapper[5002]: I0930 12:22:55.771232 5002 plugin_watcher.go:194] "Adding socket path or updating timestamp to desired state cache" path="/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock" Sep 30 12:22:55 crc kubenswrapper[5002]: I0930 12:22:55.811548 5002 reconciler.go:161] "OperationExecutor.RegisterPlugin started" plugin={"SocketPath":"/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock","Timestamp":"2025-09-30T12:22:55.771252539Z","Handler":null,"Name":""} Sep 30 12:22:55 crc kubenswrapper[5002]: I0930 12:22:55.816809 5002 csi_plugin.go:100] kubernetes.io/csi: Trying to validate a new CSI Driver with name: kubevirt.io.hostpath-provisioner endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock versions: 1.0.0 Sep 30 12:22:55 crc kubenswrapper[5002]: I0930 12:22:55.816852 5002 csi_plugin.go:113] kubernetes.io/csi: Register new plugin with name: kubevirt.io.hostpath-provisioner at endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock Sep 30 12:22:55 crc kubenswrapper[5002]: I0930 12:22:55.842768 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zfcl9\" (UID: \"048223bb-4ff9-41e0-9b78-457a720ff399\") " pod="openshift-image-registry/image-registry-697d97f7c8-zfcl9" Sep 30 12:22:55 crc kubenswrapper[5002]: I0930 12:22:55.852161 5002 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
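The failures above are a registration race rather than a storage fault: the reconciler keeps retrying MountDevice and TearDownAt for pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 with a 500ms backoff ("No retries permitted until ...") because kubevirt.io.hostpath-provisioner is not yet in the kubelet's registered-driver list. The race resolves in the entries that follow: the plugin watcher picks up /var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock at 12:22:55.771, csi_plugin.go validates and registers the driver at 12:22:55.816, and MountVolume.MountDevice succeeds at 12:22:55.852 (the driver does not set STAGE_UNSTAGE_VOLUME, so the stage step is skipped). A minimal sketch of how external tooling could wait out the same race by polling the node's CSINode object, assuming client-go; the waitForCSIDriver helper name, poll interval, and timeout below are illustrative choices, not taken from the log:

package main

import (
	"context"
	"fmt"
	"time"

	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/apimachinery/pkg/util/wait"
	"k8s.io/client-go/kubernetes"
	"k8s.io/client-go/tools/clientcmd"
)

// waitForCSIDriver polls the CSINode object until the named driver appears
// in spec.drivers, which is the API-visible side of the registration the
// kubelet performs in csi_plugin.go above. (Illustrative helper, not kubelet code.)
func waitForCSIDriver(ctx context.Context, cs kubernetes.Interface, node, driver string) error {
	return wait.PollUntilContextTimeout(ctx, 500*time.Millisecond, 2*time.Minute, true,
		func(ctx context.Context) (bool, error) {
			csiNode, err := cs.StorageV1().CSINodes().Get(ctx, node, metav1.GetOptions{})
			if err != nil {
				// The CSINode object may not exist yet; keep polling until timeout.
				return false, nil
			}
			for _, d := range csiNode.Spec.Drivers {
				if d.Name == driver {
					return true, nil
				}
			}
			return false, nil
		})
}

func main() {
	cfg, err := clientcmd.BuildConfigFromFlags("", clientcmd.RecommendedHomeFile)
	if err != nil {
		panic(err)
	}
	cs := kubernetes.NewForConfigOrDie(cfg)
	if err := waitForCSIDriver(context.Background(), cs, "crc", "kubevirt.io.hostpath-provisioner"); err != nil {
		panic(err)
	}
	fmt.Println("kubevirt.io.hostpath-provisioner registered")
}

The kubelet itself needs no such helper: the per-operation backoff seen here (500ms, growing on repeated failures) converges on its own once the driver registers, as the succeeding MountDevice/SetUp entries below show.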
Sep 30 12:22:55 crc kubenswrapper[5002]: I0930 12:22:55.852225 5002 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zfcl9\" (UID: \"048223bb-4ff9-41e0-9b78-457a720ff399\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount\"" pod="openshift-image-registry/image-registry-697d97f7c8-zfcl9" Sep 30 12:22:55 crc kubenswrapper[5002]: I0930 12:22:55.889214 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zfcl9\" (UID: \"048223bb-4ff9-41e0-9b78-457a720ff399\") " pod="openshift-image-registry/image-registry-697d97f7c8-zfcl9" Sep 30 12:22:55 crc kubenswrapper[5002]: I0930 12:22:55.943678 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 12:22:55 crc kubenswrapper[5002]: I0930 12:22:55.957613 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". PluginName "kubernetes.io/csi", VolumeGidValue "" Sep 30 12:22:56 crc kubenswrapper[5002]: I0930 12:22:56.008371 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-ccfzv"] Sep 30 12:22:56 crc kubenswrapper[5002]: I0930 12:22:56.009580 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-ccfzv" Sep 30 12:22:56 crc kubenswrapper[5002]: I0930 12:22:56.011219 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Sep 30 12:22:56 crc kubenswrapper[5002]: I0930 12:22:56.017499 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-ingress/router-default-5444994796-6rw5f" Sep 30 12:22:56 crc kubenswrapper[5002]: I0930 12:22:56.018046 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ingress/router-default-5444994796-6rw5f" Sep 30 12:22:56 crc kubenswrapper[5002]: I0930 12:22:56.025033 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ingress/router-default-5444994796-6rw5f" Sep 30 12:22:56 crc kubenswrapper[5002]: I0930 12:22:56.036710 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-ccfzv"] Sep 30 12:22:56 crc kubenswrapper[5002]: I0930 12:22:56.145800 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1c74574a-77fc-4f9f-93ff-ff2b64f27312-utilities\") pod \"certified-operators-ccfzv\" (UID: \"1c74574a-77fc-4f9f-93ff-ff2b64f27312\") " pod="openshift-marketplace/certified-operators-ccfzv" Sep 30 12:22:56 crc kubenswrapper[5002]: I0930 12:22:56.145857 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rf2nr\" (UniqueName: \"kubernetes.io/projected/1c74574a-77fc-4f9f-93ff-ff2b64f27312-kube-api-access-rf2nr\") pod \"certified-operators-ccfzv\" (UID: \"1c74574a-77fc-4f9f-93ff-ff2b64f27312\") " pod="openshift-marketplace/certified-operators-ccfzv" Sep 30 12:22:56 crc kubenswrapper[5002]: I0930 12:22:56.145919 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1c74574a-77fc-4f9f-93ff-ff2b64f27312-catalog-content\") pod \"certified-operators-ccfzv\" (UID: \"1c74574a-77fc-4f9f-93ff-ff2b64f27312\") " pod="openshift-marketplace/certified-operators-ccfzv" Sep 30 12:22:56 crc kubenswrapper[5002]: I0930 12:22:56.157790 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-zfcl9" Sep 30 12:22:56 crc kubenswrapper[5002]: I0930 12:22:56.208991 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-5w8h8"] Sep 30 12:22:56 crc kubenswrapper[5002]: I0930 12:22:56.210301 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-5w8h8" Sep 30 12:22:56 crc kubenswrapper[5002]: I0930 12:22:56.212846 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Sep 30 12:22:56 crc kubenswrapper[5002]: I0930 12:22:56.220776 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-5w8h8"] Sep 30 12:22:56 crc kubenswrapper[5002]: I0930 12:22:56.247807 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1c74574a-77fc-4f9f-93ff-ff2b64f27312-catalog-content\") pod \"certified-operators-ccfzv\" (UID: \"1c74574a-77fc-4f9f-93ff-ff2b64f27312\") " pod="openshift-marketplace/certified-operators-ccfzv" Sep 30 12:22:56 crc kubenswrapper[5002]: I0930 12:22:56.248125 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1c74574a-77fc-4f9f-93ff-ff2b64f27312-utilities\") pod \"certified-operators-ccfzv\" (UID: \"1c74574a-77fc-4f9f-93ff-ff2b64f27312\") " pod="openshift-marketplace/certified-operators-ccfzv" Sep 30 12:22:56 crc kubenswrapper[5002]: I0930 12:22:56.248195 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rf2nr\" (UniqueName: \"kubernetes.io/projected/1c74574a-77fc-4f9f-93ff-ff2b64f27312-kube-api-access-rf2nr\") pod \"certified-operators-ccfzv\" (UID: \"1c74574a-77fc-4f9f-93ff-ff2b64f27312\") " pod="openshift-marketplace/certified-operators-ccfzv" Sep 30 12:22:56 crc kubenswrapper[5002]: I0930 12:22:56.248898 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1c74574a-77fc-4f9f-93ff-ff2b64f27312-catalog-content\") pod \"certified-operators-ccfzv\" (UID: \"1c74574a-77fc-4f9f-93ff-ff2b64f27312\") " pod="openshift-marketplace/certified-operators-ccfzv" Sep 30 12:22:56 crc kubenswrapper[5002]: I0930 12:22:56.248960 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1c74574a-77fc-4f9f-93ff-ff2b64f27312-utilities\") pod \"certified-operators-ccfzv\" (UID: \"1c74574a-77fc-4f9f-93ff-ff2b64f27312\") " pod="openshift-marketplace/certified-operators-ccfzv" Sep 30 12:22:56 crc kubenswrapper[5002]: I0930 12:22:56.281788 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rf2nr\" (UniqueName: \"kubernetes.io/projected/1c74574a-77fc-4f9f-93ff-ff2b64f27312-kube-api-access-rf2nr\") pod \"certified-operators-ccfzv\" (UID: \"1c74574a-77fc-4f9f-93ff-ff2b64f27312\") " pod="openshift-marketplace/certified-operators-ccfzv" Sep 30 12:22:56 crc kubenswrapper[5002]: I0930 12:22:56.326203 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-ccfzv" Sep 30 12:22:56 crc kubenswrapper[5002]: I0930 12:22:56.349983 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4mplt\" (UniqueName: \"kubernetes.io/projected/00884b54-ca12-4a91-a28b-fcd78a870b68-kube-api-access-4mplt\") pod \"community-operators-5w8h8\" (UID: \"00884b54-ca12-4a91-a28b-fcd78a870b68\") " pod="openshift-marketplace/community-operators-5w8h8" Sep 30 12:22:56 crc kubenswrapper[5002]: I0930 12:22:56.350047 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/00884b54-ca12-4a91-a28b-fcd78a870b68-utilities\") pod \"community-operators-5w8h8\" (UID: \"00884b54-ca12-4a91-a28b-fcd78a870b68\") " pod="openshift-marketplace/community-operators-5w8h8" Sep 30 12:22:56 crc kubenswrapper[5002]: I0930 12:22:56.350095 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/00884b54-ca12-4a91-a28b-fcd78a870b68-catalog-content\") pod \"community-operators-5w8h8\" (UID: \"00884b54-ca12-4a91-a28b-fcd78a870b68\") " pod="openshift-marketplace/community-operators-5w8h8" Sep 30 12:22:56 crc kubenswrapper[5002]: I0930 12:22:56.389447 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-zfcl9"] Sep 30 12:22:56 crc kubenswrapper[5002]: I0930 12:22:56.417070 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-tbfr4"] Sep 30 12:22:56 crc kubenswrapper[5002]: I0930 12:22:56.419080 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-tbfr4" Sep 30 12:22:56 crc kubenswrapper[5002]: I0930 12:22:56.426942 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-tbfr4"] Sep 30 12:22:56 crc kubenswrapper[5002]: I0930 12:22:56.452611 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/00884b54-ca12-4a91-a28b-fcd78a870b68-utilities\") pod \"community-operators-5w8h8\" (UID: \"00884b54-ca12-4a91-a28b-fcd78a870b68\") " pod="openshift-marketplace/community-operators-5w8h8" Sep 30 12:22:56 crc kubenswrapper[5002]: I0930 12:22:56.452652 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/00884b54-ca12-4a91-a28b-fcd78a870b68-catalog-content\") pod \"community-operators-5w8h8\" (UID: \"00884b54-ca12-4a91-a28b-fcd78a870b68\") " pod="openshift-marketplace/community-operators-5w8h8" Sep 30 12:22:56 crc kubenswrapper[5002]: I0930 12:22:56.452723 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4mplt\" (UniqueName: \"kubernetes.io/projected/00884b54-ca12-4a91-a28b-fcd78a870b68-kube-api-access-4mplt\") pod \"community-operators-5w8h8\" (UID: \"00884b54-ca12-4a91-a28b-fcd78a870b68\") " pod="openshift-marketplace/community-operators-5w8h8" Sep 30 12:22:56 crc kubenswrapper[5002]: I0930 12:22:56.453311 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/00884b54-ca12-4a91-a28b-fcd78a870b68-utilities\") pod \"community-operators-5w8h8\" (UID: \"00884b54-ca12-4a91-a28b-fcd78a870b68\") " pod="openshift-marketplace/community-operators-5w8h8" Sep 30 12:22:56 crc kubenswrapper[5002]: I0930 12:22:56.453605 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/00884b54-ca12-4a91-a28b-fcd78a870b68-catalog-content\") pod \"community-operators-5w8h8\" (UID: \"00884b54-ca12-4a91-a28b-fcd78a870b68\") " pod="openshift-marketplace/community-operators-5w8h8" Sep 30 12:22:56 crc kubenswrapper[5002]: I0930 12:22:56.476973 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4mplt\" (UniqueName: \"kubernetes.io/projected/00884b54-ca12-4a91-a28b-fcd78a870b68-kube-api-access-4mplt\") pod \"community-operators-5w8h8\" (UID: \"00884b54-ca12-4a91-a28b-fcd78a870b68\") " pod="openshift-marketplace/community-operators-5w8h8" Sep 30 12:22:56 crc kubenswrapper[5002]: I0930 12:22:56.524846 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-5w8h8" Sep 30 12:22:56 crc kubenswrapper[5002]: I0930 12:22:56.556154 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/78cdb7b1-d7cc-4c42-b2c0-7cd3e4f6c559-utilities\") pod \"certified-operators-tbfr4\" (UID: \"78cdb7b1-d7cc-4c42-b2c0-7cd3e4f6c559\") " pod="openshift-marketplace/certified-operators-tbfr4" Sep 30 12:22:56 crc kubenswrapper[5002]: I0930 12:22:56.556207 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/78cdb7b1-d7cc-4c42-b2c0-7cd3e4f6c559-catalog-content\") pod \"certified-operators-tbfr4\" (UID: \"78cdb7b1-d7cc-4c42-b2c0-7cd3e4f6c559\") " pod="openshift-marketplace/certified-operators-tbfr4" Sep 30 12:22:56 crc kubenswrapper[5002]: I0930 12:22:56.556265 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g6pdc\" (UniqueName: \"kubernetes.io/projected/78cdb7b1-d7cc-4c42-b2c0-7cd3e4f6c559-kube-api-access-g6pdc\") pod \"certified-operators-tbfr4\" (UID: \"78cdb7b1-d7cc-4c42-b2c0-7cd3e4f6c559\") " pod="openshift-marketplace/certified-operators-tbfr4" Sep 30 12:22:56 crc kubenswrapper[5002]: I0930 12:22:56.606503 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-ccfzv"] Sep 30 12:22:56 crc kubenswrapper[5002]: I0930 12:22:56.612423 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-7bvwm"] Sep 30 12:22:56 crc kubenswrapper[5002]: I0930 12:22:56.613774 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-7bvwm" Sep 30 12:22:56 crc kubenswrapper[5002]: I0930 12:22:56.622354 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-7bvwm"] Sep 30 12:22:56 crc kubenswrapper[5002]: I0930 12:22:56.657521 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rxj5m\" (UniqueName: \"kubernetes.io/projected/c2314206-9216-42a7-a942-5be7b30543be-kube-api-access-rxj5m\") pod \"community-operators-7bvwm\" (UID: \"c2314206-9216-42a7-a942-5be7b30543be\") " pod="openshift-marketplace/community-operators-7bvwm" Sep 30 12:22:56 crc kubenswrapper[5002]: I0930 12:22:56.657595 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c2314206-9216-42a7-a942-5be7b30543be-catalog-content\") pod \"community-operators-7bvwm\" (UID: \"c2314206-9216-42a7-a942-5be7b30543be\") " pod="openshift-marketplace/community-operators-7bvwm" Sep 30 12:22:56 crc kubenswrapper[5002]: I0930 12:22:56.657679 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/78cdb7b1-d7cc-4c42-b2c0-7cd3e4f6c559-utilities\") pod \"certified-operators-tbfr4\" (UID: \"78cdb7b1-d7cc-4c42-b2c0-7cd3e4f6c559\") " pod="openshift-marketplace/certified-operators-tbfr4" Sep 30 12:22:56 crc kubenswrapper[5002]: I0930 12:22:56.657710 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/78cdb7b1-d7cc-4c42-b2c0-7cd3e4f6c559-catalog-content\") pod \"certified-operators-tbfr4\" (UID: \"78cdb7b1-d7cc-4c42-b2c0-7cd3e4f6c559\") " pod="openshift-marketplace/certified-operators-tbfr4" Sep 30 12:22:56 crc kubenswrapper[5002]: I0930 12:22:56.657738 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g6pdc\" (UniqueName: \"kubernetes.io/projected/78cdb7b1-d7cc-4c42-b2c0-7cd3e4f6c559-kube-api-access-g6pdc\") pod \"certified-operators-tbfr4\" (UID: \"78cdb7b1-d7cc-4c42-b2c0-7cd3e4f6c559\") " pod="openshift-marketplace/certified-operators-tbfr4" Sep 30 12:22:56 crc kubenswrapper[5002]: I0930 12:22:56.657854 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c2314206-9216-42a7-a942-5be7b30543be-utilities\") pod \"community-operators-7bvwm\" (UID: \"c2314206-9216-42a7-a942-5be7b30543be\") " pod="openshift-marketplace/community-operators-7bvwm" Sep 30 12:22:56 crc kubenswrapper[5002]: I0930 12:22:56.658243 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/78cdb7b1-d7cc-4c42-b2c0-7cd3e4f6c559-utilities\") pod \"certified-operators-tbfr4\" (UID: \"78cdb7b1-d7cc-4c42-b2c0-7cd3e4f6c559\") " pod="openshift-marketplace/certified-operators-tbfr4" Sep 30 12:22:56 crc kubenswrapper[5002]: I0930 12:22:56.658290 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/78cdb7b1-d7cc-4c42-b2c0-7cd3e4f6c559-catalog-content\") pod \"certified-operators-tbfr4\" (UID: \"78cdb7b1-d7cc-4c42-b2c0-7cd3e4f6c559\") " pod="openshift-marketplace/certified-operators-tbfr4" Sep 30 12:22:56 crc kubenswrapper[5002]: I0930 12:22:56.684377 5002 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-g6pdc\" (UniqueName: \"kubernetes.io/projected/78cdb7b1-d7cc-4c42-b2c0-7cd3e4f6c559-kube-api-access-g6pdc\") pod \"certified-operators-tbfr4\" (UID: \"78cdb7b1-d7cc-4c42-b2c0-7cd3e4f6c559\") " pod="openshift-marketplace/certified-operators-tbfr4" Sep 30 12:22:56 crc kubenswrapper[5002]: I0930 12:22:56.684704 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8f668bae-612b-4b75-9490-919e737c6a3b" path="/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes" Sep 30 12:22:56 crc kubenswrapper[5002]: I0930 12:22:56.746465 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-tbfr4" Sep 30 12:22:56 crc kubenswrapper[5002]: I0930 12:22:56.755138 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"19d990f58c880fbf081b737656f92673dd125366eb2204e6de85bd52644571d3"} Sep 30 12:22:56 crc kubenswrapper[5002]: I0930 12:22:56.758469 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rxj5m\" (UniqueName: \"kubernetes.io/projected/c2314206-9216-42a7-a942-5be7b30543be-kube-api-access-rxj5m\") pod \"community-operators-7bvwm\" (UID: \"c2314206-9216-42a7-a942-5be7b30543be\") " pod="openshift-marketplace/community-operators-7bvwm" Sep 30 12:22:56 crc kubenswrapper[5002]: I0930 12:22:56.758604 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c2314206-9216-42a7-a942-5be7b30543be-catalog-content\") pod \"community-operators-7bvwm\" (UID: \"c2314206-9216-42a7-a942-5be7b30543be\") " pod="openshift-marketplace/community-operators-7bvwm" Sep 30 12:22:56 crc kubenswrapper[5002]: I0930 12:22:56.758691 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c2314206-9216-42a7-a942-5be7b30543be-utilities\") pod \"community-operators-7bvwm\" (UID: \"c2314206-9216-42a7-a942-5be7b30543be\") " pod="openshift-marketplace/community-operators-7bvwm" Sep 30 12:22:56 crc kubenswrapper[5002]: I0930 12:22:56.759320 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c2314206-9216-42a7-a942-5be7b30543be-utilities\") pod \"community-operators-7bvwm\" (UID: \"c2314206-9216-42a7-a942-5be7b30543be\") " pod="openshift-marketplace/community-operators-7bvwm" Sep 30 12:22:56 crc kubenswrapper[5002]: I0930 12:22:56.759546 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-zfcl9" event={"ID":"048223bb-4ff9-41e0-9b78-457a720ff399","Type":"ContainerStarted","Data":"d762565a981249cbcc25e00d9edcfde22446050681650e2fd5ba15e6325bf730"} Sep 30 12:22:56 crc kubenswrapper[5002]: I0930 12:22:56.759594 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-zfcl9" event={"ID":"048223bb-4ff9-41e0-9b78-457a720ff399","Type":"ContainerStarted","Data":"2789dab23b8c0aa1c2118197e37824969da42811b379310113f6b92c487cac48"} Sep 30 12:22:56 crc kubenswrapper[5002]: I0930 12:22:56.760101 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-697d97f7c8-zfcl9" Sep 30 12:22:56 crc 
kubenswrapper[5002]: I0930 12:22:56.762886 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c2314206-9216-42a7-a942-5be7b30543be-catalog-content\") pod \"community-operators-7bvwm\" (UID: \"c2314206-9216-42a7-a942-5be7b30543be\") " pod="openshift-marketplace/community-operators-7bvwm" Sep 30 12:22:56 crc kubenswrapper[5002]: I0930 12:22:56.777285 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"2716d5f3622896b24b531313f65fc5a5ea7513adc53b7639d30bf10b35124324"} Sep 30 12:22:56 crc kubenswrapper[5002]: I0930 12:22:56.780217 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rxj5m\" (UniqueName: \"kubernetes.io/projected/c2314206-9216-42a7-a942-5be7b30543be-kube-api-access-rxj5m\") pod \"community-operators-7bvwm\" (UID: \"c2314206-9216-42a7-a942-5be7b30543be\") " pod="openshift-marketplace/community-operators-7bvwm" Sep 30 12:22:56 crc kubenswrapper[5002]: I0930 12:22:56.780668 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-znwgl" event={"ID":"2c6f790c-415f-46c9-baba-251cdc1e14b2","Type":"ContainerStarted","Data":"819241bc29a1a28f360131577364c6a925411fa88079c09a61aaf5bc8d167363"} Sep 30 12:22:56 crc kubenswrapper[5002]: I0930 12:22:56.785466 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-ccfzv" event={"ID":"1c74574a-77fc-4f9f-93ff-ff2b64f27312","Type":"ContainerStarted","Data":"16a472e95c5dd33216af5c89a96fb06e05e871d1082ed7f2d3406dbd2161ecc4"} Sep 30 12:22:56 crc kubenswrapper[5002]: I0930 12:22:56.793096 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-5w8h8"] Sep 30 12:22:56 crc kubenswrapper[5002]: I0930 12:22:56.793823 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-697d97f7c8-zfcl9" podStartSLOduration=126.793557616 podStartE2EDuration="2m6.793557616s" podCreationTimestamp="2025-09-30 12:20:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 12:22:56.791399775 +0000 UTC m=+151.041081931" watchObservedRunningTime="2025-09-30 12:22:56.793557616 +0000 UTC m=+151.043239762" Sep 30 12:22:56 crc kubenswrapper[5002]: W0930 12:22:56.800822 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod00884b54_ca12_4a91_a28b_fcd78a870b68.slice/crio-878833d0673261a2dc48c8344eaf110379ab7daa366463e40a590357f77c256f WatchSource:0}: Error finding container 878833d0673261a2dc48c8344eaf110379ab7daa366463e40a590357f77c256f: Status 404 returned error can't find the container with id 878833d0673261a2dc48c8344eaf110379ab7daa366463e40a590357f77c256f Sep 30 12:22:56 crc kubenswrapper[5002]: I0930 12:22:56.932784 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-7bvwm" Sep 30 12:22:57 crc kubenswrapper[5002]: I0930 12:22:57.169950 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29320575-h29rv" Sep 30 12:22:57 crc kubenswrapper[5002]: I0930 12:22:57.182892 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="hostpath-provisioner/csi-hostpathplugin-znwgl" podStartSLOduration=12.182877136 podStartE2EDuration="12.182877136s" podCreationTimestamp="2025-09-30 12:22:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 12:22:56.854105404 +0000 UTC m=+151.103787550" watchObservedRunningTime="2025-09-30 12:22:57.182877136 +0000 UTC m=+151.432559272" Sep 30 12:22:57 crc kubenswrapper[5002]: I0930 12:22:57.184041 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-7bvwm"] Sep 30 12:22:57 crc kubenswrapper[5002]: I0930 12:22:57.186052 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Sep 30 12:22:57 crc kubenswrapper[5002]: E0930 12:22:57.186240 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0335170b-46ee-4cd3-aae5-694623192d49" containerName="collect-profiles" Sep 30 12:22:57 crc kubenswrapper[5002]: I0930 12:22:57.186251 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="0335170b-46ee-4cd3-aae5-694623192d49" containerName="collect-profiles" Sep 30 12:22:57 crc kubenswrapper[5002]: I0930 12:22:57.186362 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="0335170b-46ee-4cd3-aae5-694623192d49" containerName="collect-profiles" Sep 30 12:22:57 crc kubenswrapper[5002]: I0930 12:22:57.186690 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Sep 30 12:22:57 crc kubenswrapper[5002]: I0930 12:22:57.189773 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager"/"kube-root-ca.crt" Sep 30 12:22:57 crc kubenswrapper[5002]: I0930 12:22:57.190067 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager"/"installer-sa-dockercfg-kjl2n" Sep 30 12:22:57 crc kubenswrapper[5002]: I0930 12:22:57.201108 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Sep 30 12:22:57 crc kubenswrapper[5002]: I0930 12:22:57.268207 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dwrvp\" (UniqueName: \"kubernetes.io/projected/0335170b-46ee-4cd3-aae5-694623192d49-kube-api-access-dwrvp\") pod \"0335170b-46ee-4cd3-aae5-694623192d49\" (UID: \"0335170b-46ee-4cd3-aae5-694623192d49\") " Sep 30 12:22:57 crc kubenswrapper[5002]: I0930 12:22:57.268283 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/0335170b-46ee-4cd3-aae5-694623192d49-secret-volume\") pod \"0335170b-46ee-4cd3-aae5-694623192d49\" (UID: \"0335170b-46ee-4cd3-aae5-694623192d49\") " Sep 30 12:22:57 crc kubenswrapper[5002]: I0930 12:22:57.268345 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/0335170b-46ee-4cd3-aae5-694623192d49-config-volume\") pod \"0335170b-46ee-4cd3-aae5-694623192d49\" (UID: \"0335170b-46ee-4cd3-aae5-694623192d49\") " Sep 30 12:22:57 crc kubenswrapper[5002]: I0930 12:22:57.268753 5002 kubelet.go:2428] 
"SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-tbfr4"] Sep 30 12:22:57 crc kubenswrapper[5002]: I0930 12:22:57.268779 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/4de266de-62cd-4654-9083-f100649e46b7-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"4de266de-62cd-4654-9083-f100649e46b7\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Sep 30 12:22:57 crc kubenswrapper[5002]: I0930 12:22:57.268812 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/4de266de-62cd-4654-9083-f100649e46b7-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"4de266de-62cd-4654-9083-f100649e46b7\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Sep 30 12:22:57 crc kubenswrapper[5002]: I0930 12:22:57.273350 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0335170b-46ee-4cd3-aae5-694623192d49-config-volume" (OuterVolumeSpecName: "config-volume") pod "0335170b-46ee-4cd3-aae5-694623192d49" (UID: "0335170b-46ee-4cd3-aae5-694623192d49"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 12:22:57 crc kubenswrapper[5002]: I0930 12:22:57.278885 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0335170b-46ee-4cd3-aae5-694623192d49-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "0335170b-46ee-4cd3-aae5-694623192d49" (UID: "0335170b-46ee-4cd3-aae5-694623192d49"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:22:57 crc kubenswrapper[5002]: I0930 12:22:57.281694 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0335170b-46ee-4cd3-aae5-694623192d49-kube-api-access-dwrvp" (OuterVolumeSpecName: "kube-api-access-dwrvp") pod "0335170b-46ee-4cd3-aae5-694623192d49" (UID: "0335170b-46ee-4cd3-aae5-694623192d49"). InnerVolumeSpecName "kube-api-access-dwrvp". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 12:22:57 crc kubenswrapper[5002]: I0930 12:22:57.369928 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/4de266de-62cd-4654-9083-f100649e46b7-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"4de266de-62cd-4654-9083-f100649e46b7\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Sep 30 12:22:57 crc kubenswrapper[5002]: I0930 12:22:57.369979 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/4de266de-62cd-4654-9083-f100649e46b7-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"4de266de-62cd-4654-9083-f100649e46b7\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Sep 30 12:22:57 crc kubenswrapper[5002]: I0930 12:22:57.370085 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dwrvp\" (UniqueName: \"kubernetes.io/projected/0335170b-46ee-4cd3-aae5-694623192d49-kube-api-access-dwrvp\") on node \"crc\" DevicePath \"\"" Sep 30 12:22:57 crc kubenswrapper[5002]: I0930 12:22:57.370099 5002 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/0335170b-46ee-4cd3-aae5-694623192d49-secret-volume\") on node \"crc\" DevicePath \"\"" Sep 30 12:22:57 crc kubenswrapper[5002]: I0930 12:22:57.370109 5002 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/0335170b-46ee-4cd3-aae5-694623192d49-config-volume\") on node \"crc\" DevicePath \"\"" Sep 30 12:22:57 crc kubenswrapper[5002]: I0930 12:22:57.370101 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/4de266de-62cd-4654-9083-f100649e46b7-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"4de266de-62cd-4654-9083-f100649e46b7\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Sep 30 12:22:57 crc kubenswrapper[5002]: I0930 12:22:57.386639 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/4de266de-62cd-4654-9083-f100649e46b7-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"4de266de-62cd-4654-9083-f100649e46b7\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Sep 30 12:22:57 crc kubenswrapper[5002]: I0930 12:22:57.524548 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Sep 30 12:22:57 crc kubenswrapper[5002]: I0930 12:22:57.706685 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Sep 30 12:22:57 crc kubenswrapper[5002]: W0930 12:22:57.718406 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-pod4de266de_62cd_4654_9083_f100649e46b7.slice/crio-1ca6cd558a97e9f7c36b6251eb4a1500480c13e70417037534f52f1104a7389b WatchSource:0}: Error finding container 1ca6cd558a97e9f7c36b6251eb4a1500480c13e70417037534f52f1104a7389b: Status 404 returned error can't find the container with id 1ca6cd558a97e9f7c36b6251eb4a1500480c13e70417037534f52f1104a7389b Sep 30 12:22:57 crc kubenswrapper[5002]: I0930 12:22:57.792723 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"4de266de-62cd-4654-9083-f100649e46b7","Type":"ContainerStarted","Data":"1ca6cd558a97e9f7c36b6251eb4a1500480c13e70417037534f52f1104a7389b"} Sep 30 12:22:57 crc kubenswrapper[5002]: I0930 12:22:57.797584 5002 generic.go:334] "Generic (PLEG): container finished" podID="00884b54-ca12-4a91-a28b-fcd78a870b68" containerID="62934988a4d75b6af15a3581f0256d0e90ff0740b44d3ab4d4d7c82fa739dda1" exitCode=0 Sep 30 12:22:57 crc kubenswrapper[5002]: I0930 12:22:57.797674 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-5w8h8" event={"ID":"00884b54-ca12-4a91-a28b-fcd78a870b68","Type":"ContainerDied","Data":"62934988a4d75b6af15a3581f0256d0e90ff0740b44d3ab4d4d7c82fa739dda1"} Sep 30 12:22:57 crc kubenswrapper[5002]: I0930 12:22:57.797712 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-5w8h8" event={"ID":"00884b54-ca12-4a91-a28b-fcd78a870b68","Type":"ContainerStarted","Data":"878833d0673261a2dc48c8344eaf110379ab7daa366463e40a590357f77c256f"} Sep 30 12:22:57 crc kubenswrapper[5002]: I0930 12:22:57.801934 5002 generic.go:334] "Generic (PLEG): container finished" podID="1c74574a-77fc-4f9f-93ff-ff2b64f27312" containerID="f0bfb6913d2ed978286c961c84b792b142b5768081d5240f9fd1617b85d8ad25" exitCode=0 Sep 30 12:22:57 crc kubenswrapper[5002]: I0930 12:22:57.802014 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-ccfzv" event={"ID":"1c74574a-77fc-4f9f-93ff-ff2b64f27312","Type":"ContainerDied","Data":"f0bfb6913d2ed978286c961c84b792b142b5768081d5240f9fd1617b85d8ad25"} Sep 30 12:22:57 crc kubenswrapper[5002]: I0930 12:22:57.803517 5002 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Sep 30 12:22:57 crc kubenswrapper[5002]: I0930 12:22:57.816003 5002 generic.go:334] "Generic (PLEG): container finished" podID="c2314206-9216-42a7-a942-5be7b30543be" containerID="352909dadc59b2f01c68acee6a9df4d173191de2b99d0d3f9606300c6d025af0" exitCode=0 Sep 30 12:22:57 crc kubenswrapper[5002]: I0930 12:22:57.816079 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7bvwm" event={"ID":"c2314206-9216-42a7-a942-5be7b30543be","Type":"ContainerDied","Data":"352909dadc59b2f01c68acee6a9df4d173191de2b99d0d3f9606300c6d025af0"} Sep 30 12:22:57 crc kubenswrapper[5002]: I0930 12:22:57.816111 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7bvwm" 
event={"ID":"c2314206-9216-42a7-a942-5be7b30543be","Type":"ContainerStarted","Data":"e591b6bf63258756e3af2145f0af464bbc2311224ee04e9c03174a4228a2b262"} Sep 30 12:22:57 crc kubenswrapper[5002]: I0930 12:22:57.823074 5002 generic.go:334] "Generic (PLEG): container finished" podID="78cdb7b1-d7cc-4c42-b2c0-7cd3e4f6c559" containerID="6ef344b3df2e43ae4b811e90d577522e10ff355b9390c6b085c0fe122c505d3e" exitCode=0 Sep 30 12:22:57 crc kubenswrapper[5002]: I0930 12:22:57.823154 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-tbfr4" event={"ID":"78cdb7b1-d7cc-4c42-b2c0-7cd3e4f6c559","Type":"ContainerDied","Data":"6ef344b3df2e43ae4b811e90d577522e10ff355b9390c6b085c0fe122c505d3e"} Sep 30 12:22:57 crc kubenswrapper[5002]: I0930 12:22:57.824129 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-tbfr4" event={"ID":"78cdb7b1-d7cc-4c42-b2c0-7cd3e4f6c559","Type":"ContainerStarted","Data":"ea4df70ac60be647b1b7f059902d1318c1abcd554d7b34f0bbdc30e230dd4fd2"} Sep 30 12:22:57 crc kubenswrapper[5002]: I0930 12:22:57.827695 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29320575-h29rv" Sep 30 12:22:57 crc kubenswrapper[5002]: I0930 12:22:57.832876 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29320575-h29rv" event={"ID":"0335170b-46ee-4cd3-aae5-694623192d49","Type":"ContainerDied","Data":"e5f30a7df10e71d5ace83c67ac0a4f56ccf3103b223a855bc4d9b2cadaf9c552"} Sep 30 12:22:57 crc kubenswrapper[5002]: I0930 12:22:57.832915 5002 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e5f30a7df10e71d5ace83c67ac0a4f56ccf3103b223a855bc4d9b2cadaf9c552" Sep 30 12:22:57 crc kubenswrapper[5002]: E0930 12:22:57.903873 5002 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0335170b_46ee_4cd3_aae5_694623192d49.slice/crio-e5f30a7df10e71d5ace83c67ac0a4f56ccf3103b223a855bc4d9b2cadaf9c552\": RecentStats: unable to find data in memory cache]" Sep 30 12:22:58 crc kubenswrapper[5002]: I0930 12:22:58.207789 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-fr4sd"] Sep 30 12:22:58 crc kubenswrapper[5002]: I0930 12:22:58.208668 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-fr4sd" Sep 30 12:22:58 crc kubenswrapper[5002]: I0930 12:22:58.213928 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Sep 30 12:22:58 crc kubenswrapper[5002]: I0930 12:22:58.218531 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-fr4sd"] Sep 30 12:22:58 crc kubenswrapper[5002]: I0930 12:22:58.226000 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-pzv7v" Sep 30 12:22:58 crc kubenswrapper[5002]: I0930 12:22:58.229901 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-pzv7v" Sep 30 12:22:58 crc kubenswrapper[5002]: I0930 12:22:58.284766 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7a6520d7-b4d9-4837-a574-48cb86ee6231-catalog-content\") pod \"redhat-marketplace-fr4sd\" (UID: \"7a6520d7-b4d9-4837-a574-48cb86ee6231\") " pod="openshift-marketplace/redhat-marketplace-fr4sd" Sep 30 12:22:58 crc kubenswrapper[5002]: I0930 12:22:58.284831 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7a6520d7-b4d9-4837-a574-48cb86ee6231-utilities\") pod \"redhat-marketplace-fr4sd\" (UID: \"7a6520d7-b4d9-4837-a574-48cb86ee6231\") " pod="openshift-marketplace/redhat-marketplace-fr4sd" Sep 30 12:22:58 crc kubenswrapper[5002]: I0930 12:22:58.284893 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vw9t5\" (UniqueName: \"kubernetes.io/projected/7a6520d7-b4d9-4837-a574-48cb86ee6231-kube-api-access-vw9t5\") pod \"redhat-marketplace-fr4sd\" (UID: \"7a6520d7-b4d9-4837-a574-48cb86ee6231\") " pod="openshift-marketplace/redhat-marketplace-fr4sd" Sep 30 12:22:58 crc kubenswrapper[5002]: I0930 12:22:58.385874 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7a6520d7-b4d9-4837-a574-48cb86ee6231-catalog-content\") pod \"redhat-marketplace-fr4sd\" (UID: \"7a6520d7-b4d9-4837-a574-48cb86ee6231\") " pod="openshift-marketplace/redhat-marketplace-fr4sd" Sep 30 12:22:58 crc kubenswrapper[5002]: I0930 12:22:58.385926 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7a6520d7-b4d9-4837-a574-48cb86ee6231-utilities\") pod \"redhat-marketplace-fr4sd\" (UID: \"7a6520d7-b4d9-4837-a574-48cb86ee6231\") " pod="openshift-marketplace/redhat-marketplace-fr4sd" Sep 30 12:22:58 crc kubenswrapper[5002]: I0930 12:22:58.385964 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vw9t5\" (UniqueName: \"kubernetes.io/projected/7a6520d7-b4d9-4837-a574-48cb86ee6231-kube-api-access-vw9t5\") pod \"redhat-marketplace-fr4sd\" (UID: \"7a6520d7-b4d9-4837-a574-48cb86ee6231\") " pod="openshift-marketplace/redhat-marketplace-fr4sd" Sep 30 12:22:58 crc kubenswrapper[5002]: I0930 12:22:58.386627 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7a6520d7-b4d9-4837-a574-48cb86ee6231-catalog-content\") pod \"redhat-marketplace-fr4sd\" (UID: 
\"7a6520d7-b4d9-4837-a574-48cb86ee6231\") " pod="openshift-marketplace/redhat-marketplace-fr4sd" Sep 30 12:22:58 crc kubenswrapper[5002]: I0930 12:22:58.386834 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7a6520d7-b4d9-4837-a574-48cb86ee6231-utilities\") pod \"redhat-marketplace-fr4sd\" (UID: \"7a6520d7-b4d9-4837-a574-48cb86ee6231\") " pod="openshift-marketplace/redhat-marketplace-fr4sd" Sep 30 12:22:58 crc kubenswrapper[5002]: I0930 12:22:58.405755 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vw9t5\" (UniqueName: \"kubernetes.io/projected/7a6520d7-b4d9-4837-a574-48cb86ee6231-kube-api-access-vw9t5\") pod \"redhat-marketplace-fr4sd\" (UID: \"7a6520d7-b4d9-4837-a574-48cb86ee6231\") " pod="openshift-marketplace/redhat-marketplace-fr4sd" Sep 30 12:22:58 crc kubenswrapper[5002]: I0930 12:22:58.548669 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-fr4sd" Sep 30 12:22:58 crc kubenswrapper[5002]: I0930 12:22:58.604177 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-9vlx8"] Sep 30 12:22:58 crc kubenswrapper[5002]: I0930 12:22:58.605113 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-9vlx8" Sep 30 12:22:58 crc kubenswrapper[5002]: I0930 12:22:58.622225 5002 patch_prober.go:28] interesting pod/downloads-7954f5f757-tdxkp container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.11:8080/\": dial tcp 10.217.0.11:8080: connect: connection refused" start-of-body= Sep 30 12:22:58 crc kubenswrapper[5002]: I0930 12:22:58.622273 5002 patch_prober.go:28] interesting pod/downloads-7954f5f757-tdxkp container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.11:8080/\": dial tcp 10.217.0.11:8080: connect: connection refused" start-of-body= Sep 30 12:22:58 crc kubenswrapper[5002]: I0930 12:22:58.622329 5002 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-tdxkp" podUID="54790eef-93bc-4d88-bcc5-3a2f6e1c1fa5" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.11:8080/\": dial tcp 10.217.0.11:8080: connect: connection refused" Sep 30 12:22:58 crc kubenswrapper[5002]: I0930 12:22:58.622278 5002 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-tdxkp" podUID="54790eef-93bc-4d88-bcc5-3a2f6e1c1fa5" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.11:8080/\": dial tcp 10.217.0.11:8080: connect: connection refused" Sep 30 12:22:58 crc kubenswrapper[5002]: I0930 12:22:58.641017 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-9vlx8"] Sep 30 12:22:58 crc kubenswrapper[5002]: I0930 12:22:58.647729 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-f9d7485db-79sft" Sep 30 12:22:58 crc kubenswrapper[5002]: I0930 12:22:58.647773 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-f9d7485db-79sft" Sep 30 12:22:58 crc kubenswrapper[5002]: I0930 12:22:58.651729 5002 patch_prober.go:28] interesting pod/console-f9d7485db-79sft container/console namespace/openshift-console: Startup probe 
status=failure output="Get \"https://10.217.0.25:8443/health\": dial tcp 10.217.0.25:8443: connect: connection refused" start-of-body= Sep 30 12:22:58 crc kubenswrapper[5002]: I0930 12:22:58.651780 5002 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-79sft" podUID="7151e9d9-4417-40bb-aac5-8f838065aa79" containerName="console" probeResult="failure" output="Get \"https://10.217.0.25:8443/health\": dial tcp 10.217.0.25:8443: connect: connection refused" Sep 30 12:22:58 crc kubenswrapper[5002]: I0930 12:22:58.695858 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e5ef4b78-d57a-4e71-b275-ac5a2ca7ab0b-catalog-content\") pod \"redhat-marketplace-9vlx8\" (UID: \"e5ef4b78-d57a-4e71-b275-ac5a2ca7ab0b\") " pod="openshift-marketplace/redhat-marketplace-9vlx8" Sep 30 12:22:58 crc kubenswrapper[5002]: I0930 12:22:58.696409 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jkqtp\" (UniqueName: \"kubernetes.io/projected/e5ef4b78-d57a-4e71-b275-ac5a2ca7ab0b-kube-api-access-jkqtp\") pod \"redhat-marketplace-9vlx8\" (UID: \"e5ef4b78-d57a-4e71-b275-ac5a2ca7ab0b\") " pod="openshift-marketplace/redhat-marketplace-9vlx8" Sep 30 12:22:58 crc kubenswrapper[5002]: I0930 12:22:58.696447 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e5ef4b78-d57a-4e71-b275-ac5a2ca7ab0b-utilities\") pod \"redhat-marketplace-9vlx8\" (UID: \"e5ef4b78-d57a-4e71-b275-ac5a2ca7ab0b\") " pod="openshift-marketplace/redhat-marketplace-9vlx8" Sep 30 12:22:58 crc kubenswrapper[5002]: I0930 12:22:58.802274 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e5ef4b78-d57a-4e71-b275-ac5a2ca7ab0b-catalog-content\") pod \"redhat-marketplace-9vlx8\" (UID: \"e5ef4b78-d57a-4e71-b275-ac5a2ca7ab0b\") " pod="openshift-marketplace/redhat-marketplace-9vlx8" Sep 30 12:22:58 crc kubenswrapper[5002]: I0930 12:22:58.802321 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jkqtp\" (UniqueName: \"kubernetes.io/projected/e5ef4b78-d57a-4e71-b275-ac5a2ca7ab0b-kube-api-access-jkqtp\") pod \"redhat-marketplace-9vlx8\" (UID: \"e5ef4b78-d57a-4e71-b275-ac5a2ca7ab0b\") " pod="openshift-marketplace/redhat-marketplace-9vlx8" Sep 30 12:22:58 crc kubenswrapper[5002]: I0930 12:22:58.802350 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e5ef4b78-d57a-4e71-b275-ac5a2ca7ab0b-utilities\") pod \"redhat-marketplace-9vlx8\" (UID: \"e5ef4b78-d57a-4e71-b275-ac5a2ca7ab0b\") " pod="openshift-marketplace/redhat-marketplace-9vlx8" Sep 30 12:22:58 crc kubenswrapper[5002]: I0930 12:22:58.803004 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e5ef4b78-d57a-4e71-b275-ac5a2ca7ab0b-catalog-content\") pod \"redhat-marketplace-9vlx8\" (UID: \"e5ef4b78-d57a-4e71-b275-ac5a2ca7ab0b\") " pod="openshift-marketplace/redhat-marketplace-9vlx8" Sep 30 12:22:58 crc kubenswrapper[5002]: I0930 12:22:58.803057 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e5ef4b78-d57a-4e71-b275-ac5a2ca7ab0b-utilities\") pod 
\"redhat-marketplace-9vlx8\" (UID: \"e5ef4b78-d57a-4e71-b275-ac5a2ca7ab0b\") " pod="openshift-marketplace/redhat-marketplace-9vlx8" Sep 30 12:22:58 crc kubenswrapper[5002]: I0930 12:22:58.827360 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-fr4sd"] Sep 30 12:22:58 crc kubenswrapper[5002]: I0930 12:22:58.839642 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jkqtp\" (UniqueName: \"kubernetes.io/projected/e5ef4b78-d57a-4e71-b275-ac5a2ca7ab0b-kube-api-access-jkqtp\") pod \"redhat-marketplace-9vlx8\" (UID: \"e5ef4b78-d57a-4e71-b275-ac5a2ca7ab0b\") " pod="openshift-marketplace/redhat-marketplace-9vlx8" Sep 30 12:22:58 crc kubenswrapper[5002]: I0930 12:22:58.842645 5002 generic.go:334] "Generic (PLEG): container finished" podID="4de266de-62cd-4654-9083-f100649e46b7" containerID="6c486bdc8f48095c86f001b41c3c24400ac7c3816a647c5e195ae47dd9644792" exitCode=0 Sep 30 12:22:58 crc kubenswrapper[5002]: I0930 12:22:58.842703 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"4de266de-62cd-4654-9083-f100649e46b7","Type":"ContainerDied","Data":"6c486bdc8f48095c86f001b41c3c24400ac7c3816a647c5e195ae47dd9644792"} Sep 30 12:22:58 crc kubenswrapper[5002]: I0930 12:22:58.952827 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-9vlx8" Sep 30 12:22:59 crc kubenswrapper[5002]: I0930 12:22:59.112257 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-6tqs2" Sep 30 12:22:59 crc kubenswrapper[5002]: I0930 12:22:59.207361 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-rhw4l"] Sep 30 12:22:59 crc kubenswrapper[5002]: I0930 12:22:59.208674 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-rhw4l" Sep 30 12:22:59 crc kubenswrapper[5002]: I0930 12:22:59.213219 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Sep 30 12:22:59 crc kubenswrapper[5002]: I0930 12:22:59.220733 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-rhw4l"] Sep 30 12:22:59 crc kubenswrapper[5002]: I0930 12:22:59.310130 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/30cf48ad-b568-4511-9877-bad2837d969e-catalog-content\") pod \"redhat-operators-rhw4l\" (UID: \"30cf48ad-b568-4511-9877-bad2837d969e\") " pod="openshift-marketplace/redhat-operators-rhw4l" Sep 30 12:22:59 crc kubenswrapper[5002]: I0930 12:22:59.310168 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/30cf48ad-b568-4511-9877-bad2837d969e-utilities\") pod \"redhat-operators-rhw4l\" (UID: \"30cf48ad-b568-4511-9877-bad2837d969e\") " pod="openshift-marketplace/redhat-operators-rhw4l" Sep 30 12:22:59 crc kubenswrapper[5002]: I0930 12:22:59.310237 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9vglg\" (UniqueName: \"kubernetes.io/projected/30cf48ad-b568-4511-9877-bad2837d969e-kube-api-access-9vglg\") pod \"redhat-operators-rhw4l\" (UID: \"30cf48ad-b568-4511-9877-bad2837d969e\") " pod="openshift-marketplace/redhat-operators-rhw4l" Sep 30 12:22:59 crc kubenswrapper[5002]: I0930 12:22:59.321071 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-9vlx8"] Sep 30 12:22:59 crc kubenswrapper[5002]: W0930 12:22:59.341681 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode5ef4b78_d57a_4e71_b275_ac5a2ca7ab0b.slice/crio-8e9c540b9c45b46965d830a68d013366f502af50c4789ceabf951b68b9d364ed WatchSource:0}: Error finding container 8e9c540b9c45b46965d830a68d013366f502af50c4789ceabf951b68b9d364ed: Status 404 returned error can't find the container with id 8e9c540b9c45b46965d830a68d013366f502af50c4789ceabf951b68b9d364ed Sep 30 12:22:59 crc kubenswrapper[5002]: I0930 12:22:59.411255 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/30cf48ad-b568-4511-9877-bad2837d969e-catalog-content\") pod \"redhat-operators-rhw4l\" (UID: \"30cf48ad-b568-4511-9877-bad2837d969e\") " pod="openshift-marketplace/redhat-operators-rhw4l" Sep 30 12:22:59 crc kubenswrapper[5002]: I0930 12:22:59.411304 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/30cf48ad-b568-4511-9877-bad2837d969e-utilities\") pod \"redhat-operators-rhw4l\" (UID: \"30cf48ad-b568-4511-9877-bad2837d969e\") " pod="openshift-marketplace/redhat-operators-rhw4l" Sep 30 12:22:59 crc kubenswrapper[5002]: I0930 12:22:59.411390 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9vglg\" (UniqueName: \"kubernetes.io/projected/30cf48ad-b568-4511-9877-bad2837d969e-kube-api-access-9vglg\") pod \"redhat-operators-rhw4l\" (UID: \"30cf48ad-b568-4511-9877-bad2837d969e\") " pod="openshift-marketplace/redhat-operators-rhw4l" Sep 
30 12:22:59 crc kubenswrapper[5002]: I0930 12:22:59.412089 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/30cf48ad-b568-4511-9877-bad2837d969e-utilities\") pod \"redhat-operators-rhw4l\" (UID: \"30cf48ad-b568-4511-9877-bad2837d969e\") " pod="openshift-marketplace/redhat-operators-rhw4l" Sep 30 12:22:59 crc kubenswrapper[5002]: I0930 12:22:59.412122 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/30cf48ad-b568-4511-9877-bad2837d969e-catalog-content\") pod \"redhat-operators-rhw4l\" (UID: \"30cf48ad-b568-4511-9877-bad2837d969e\") " pod="openshift-marketplace/redhat-operators-rhw4l" Sep 30 12:22:59 crc kubenswrapper[5002]: I0930 12:22:59.439436 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-hc84k" Sep 30 12:22:59 crc kubenswrapper[5002]: I0930 12:22:59.442421 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9vglg\" (UniqueName: \"kubernetes.io/projected/30cf48ad-b568-4511-9877-bad2837d969e-kube-api-access-9vglg\") pod \"redhat-operators-rhw4l\" (UID: \"30cf48ad-b568-4511-9877-bad2837d969e\") " pod="openshift-marketplace/redhat-operators-rhw4l" Sep 30 12:22:59 crc kubenswrapper[5002]: I0930 12:22:59.537772 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-rhw4l" Sep 30 12:22:59 crc kubenswrapper[5002]: I0930 12:22:59.625558 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-tbphh"] Sep 30 12:22:59 crc kubenswrapper[5002]: I0930 12:22:59.626848 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-tbphh" Sep 30 12:22:59 crc kubenswrapper[5002]: I0930 12:22:59.636311 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-tbphh"] Sep 30 12:22:59 crc kubenswrapper[5002]: I0930 12:22:59.722732 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mghzq\" (UniqueName: \"kubernetes.io/projected/4b09a25e-9b24-4ea4-a5a2-d3155a125d36-kube-api-access-mghzq\") pod \"redhat-operators-tbphh\" (UID: \"4b09a25e-9b24-4ea4-a5a2-d3155a125d36\") " pod="openshift-marketplace/redhat-operators-tbphh" Sep 30 12:22:59 crc kubenswrapper[5002]: I0930 12:22:59.722793 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4b09a25e-9b24-4ea4-a5a2-d3155a125d36-utilities\") pod \"redhat-operators-tbphh\" (UID: \"4b09a25e-9b24-4ea4-a5a2-d3155a125d36\") " pod="openshift-marketplace/redhat-operators-tbphh" Sep 30 12:22:59 crc kubenswrapper[5002]: I0930 12:22:59.722886 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4b09a25e-9b24-4ea4-a5a2-d3155a125d36-catalog-content\") pod \"redhat-operators-tbphh\" (UID: \"4b09a25e-9b24-4ea4-a5a2-d3155a125d36\") " pod="openshift-marketplace/redhat-operators-tbphh" Sep 30 12:22:59 crc kubenswrapper[5002]: I0930 12:22:59.824234 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mghzq\" (UniqueName: \"kubernetes.io/projected/4b09a25e-9b24-4ea4-a5a2-d3155a125d36-kube-api-access-mghzq\") pod \"redhat-operators-tbphh\" (UID: \"4b09a25e-9b24-4ea4-a5a2-d3155a125d36\") " pod="openshift-marketplace/redhat-operators-tbphh" Sep 30 12:22:59 crc kubenswrapper[5002]: I0930 12:22:59.824277 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4b09a25e-9b24-4ea4-a5a2-d3155a125d36-utilities\") pod \"redhat-operators-tbphh\" (UID: \"4b09a25e-9b24-4ea4-a5a2-d3155a125d36\") " pod="openshift-marketplace/redhat-operators-tbphh" Sep 30 12:22:59 crc kubenswrapper[5002]: I0930 12:22:59.824315 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4b09a25e-9b24-4ea4-a5a2-d3155a125d36-catalog-content\") pod \"redhat-operators-tbphh\" (UID: \"4b09a25e-9b24-4ea4-a5a2-d3155a125d36\") " pod="openshift-marketplace/redhat-operators-tbphh" Sep 30 12:22:59 crc kubenswrapper[5002]: I0930 12:22:59.824749 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4b09a25e-9b24-4ea4-a5a2-d3155a125d36-catalog-content\") pod \"redhat-operators-tbphh\" (UID: \"4b09a25e-9b24-4ea4-a5a2-d3155a125d36\") " pod="openshift-marketplace/redhat-operators-tbphh" Sep 30 12:22:59 crc kubenswrapper[5002]: I0930 12:22:59.824950 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4b09a25e-9b24-4ea4-a5a2-d3155a125d36-utilities\") pod \"redhat-operators-tbphh\" (UID: \"4b09a25e-9b24-4ea4-a5a2-d3155a125d36\") " pod="openshift-marketplace/redhat-operators-tbphh" Sep 30 12:22:59 crc kubenswrapper[5002]: I0930 12:22:59.855298 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-mghzq\" (UniqueName: \"kubernetes.io/projected/4b09a25e-9b24-4ea4-a5a2-d3155a125d36-kube-api-access-mghzq\") pod \"redhat-operators-tbphh\" (UID: \"4b09a25e-9b24-4ea4-a5a2-d3155a125d36\") " pod="openshift-marketplace/redhat-operators-tbphh" Sep 30 12:22:59 crc kubenswrapper[5002]: I0930 12:22:59.865339 5002 generic.go:334] "Generic (PLEG): container finished" podID="7a6520d7-b4d9-4837-a574-48cb86ee6231" containerID="63ffcd041f05f330eb10eacf002772c3351d4f2c9009428e298aa4adc3c8f1a9" exitCode=0 Sep 30 12:22:59 crc kubenswrapper[5002]: I0930 12:22:59.865414 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-fr4sd" event={"ID":"7a6520d7-b4d9-4837-a574-48cb86ee6231","Type":"ContainerDied","Data":"63ffcd041f05f330eb10eacf002772c3351d4f2c9009428e298aa4adc3c8f1a9"} Sep 30 12:22:59 crc kubenswrapper[5002]: I0930 12:22:59.865445 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-fr4sd" event={"ID":"7a6520d7-b4d9-4837-a574-48cb86ee6231","Type":"ContainerStarted","Data":"6f66e4c9389dfaf2b19fd9ef735675b9426a0ebb4d952da8d4beeb0a58c999da"} Sep 30 12:22:59 crc kubenswrapper[5002]: I0930 12:22:59.868959 5002 generic.go:334] "Generic (PLEG): container finished" podID="e5ef4b78-d57a-4e71-b275-ac5a2ca7ab0b" containerID="250676fd4c16e59bbef337958ae60887d687d64b006ae37a25aed514bd5c9b15" exitCode=0 Sep 30 12:22:59 crc kubenswrapper[5002]: I0930 12:22:59.869040 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-9vlx8" event={"ID":"e5ef4b78-d57a-4e71-b275-ac5a2ca7ab0b","Type":"ContainerDied","Data":"250676fd4c16e59bbef337958ae60887d687d64b006ae37a25aed514bd5c9b15"} Sep 30 12:22:59 crc kubenswrapper[5002]: I0930 12:22:59.869062 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-9vlx8" event={"ID":"e5ef4b78-d57a-4e71-b275-ac5a2ca7ab0b","Type":"ContainerStarted","Data":"8e9c540b9c45b46965d830a68d013366f502af50c4789ceabf951b68b9d364ed"} Sep 30 12:22:59 crc kubenswrapper[5002]: I0930 12:22:59.964993 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-tbphh" Sep 30 12:22:59 crc kubenswrapper[5002]: I0930 12:22:59.968573 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-rhw4l"] Sep 30 12:23:00 crc kubenswrapper[5002]: I0930 12:23:00.152795 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Sep 30 12:23:00 crc kubenswrapper[5002]: I0930 12:23:00.334055 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/4de266de-62cd-4654-9083-f100649e46b7-kube-api-access\") pod \"4de266de-62cd-4654-9083-f100649e46b7\" (UID: \"4de266de-62cd-4654-9083-f100649e46b7\") " Sep 30 12:23:00 crc kubenswrapper[5002]: I0930 12:23:00.334114 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/4de266de-62cd-4654-9083-f100649e46b7-kubelet-dir\") pod \"4de266de-62cd-4654-9083-f100649e46b7\" (UID: \"4de266de-62cd-4654-9083-f100649e46b7\") " Sep 30 12:23:00 crc kubenswrapper[5002]: I0930 12:23:00.334235 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/4de266de-62cd-4654-9083-f100649e46b7-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "4de266de-62cd-4654-9083-f100649e46b7" (UID: "4de266de-62cd-4654-9083-f100649e46b7"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 12:23:00 crc kubenswrapper[5002]: I0930 12:23:00.334635 5002 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/4de266de-62cd-4654-9083-f100649e46b7-kubelet-dir\") on node \"crc\" DevicePath \"\"" Sep 30 12:23:00 crc kubenswrapper[5002]: I0930 12:23:00.339508 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4de266de-62cd-4654-9083-f100649e46b7-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "4de266de-62cd-4654-9083-f100649e46b7" (UID: "4de266de-62cd-4654-9083-f100649e46b7"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 12:23:00 crc kubenswrapper[5002]: I0930 12:23:00.435574 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/4de266de-62cd-4654-9083-f100649e46b7-kube-api-access\") on node \"crc\" DevicePath \"\"" Sep 30 12:23:00 crc kubenswrapper[5002]: I0930 12:23:00.465549 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-tbphh"] Sep 30 12:23:00 crc kubenswrapper[5002]: W0930 12:23:00.474762 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4b09a25e_9b24_4ea4_a5a2_d3155a125d36.slice/crio-3c1bb1623e6bcab934df6ea271d1c45ef275dc6140919a5eb1c81178f751ac7d WatchSource:0}: Error finding container 3c1bb1623e6bcab934df6ea271d1c45ef275dc6140919a5eb1c81178f751ac7d: Status 404 returned error can't find the container with id 3c1bb1623e6bcab934df6ea271d1c45ef275dc6140919a5eb1c81178f751ac7d Sep 30 12:23:00 crc kubenswrapper[5002]: I0930 12:23:00.881070 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"4de266de-62cd-4654-9083-f100649e46b7","Type":"ContainerDied","Data":"1ca6cd558a97e9f7c36b6251eb4a1500480c13e70417037534f52f1104a7389b"} Sep 30 12:23:00 crc kubenswrapper[5002]: I0930 12:23:00.881403 5002 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1ca6cd558a97e9f7c36b6251eb4a1500480c13e70417037534f52f1104a7389b" Sep 30 12:23:00 crc kubenswrapper[5002]: I0930 12:23:00.881263 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Sep 30 12:23:00 crc kubenswrapper[5002]: I0930 12:23:00.888993 5002 generic.go:334] "Generic (PLEG): container finished" podID="30cf48ad-b568-4511-9877-bad2837d969e" containerID="993b97b428373e7da3ddd2d8dd55154cb30d8302cbd1bc1c64ffc2b73c37083c" exitCode=0 Sep 30 12:23:00 crc kubenswrapper[5002]: I0930 12:23:00.889227 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rhw4l" event={"ID":"30cf48ad-b568-4511-9877-bad2837d969e","Type":"ContainerDied","Data":"993b97b428373e7da3ddd2d8dd55154cb30d8302cbd1bc1c64ffc2b73c37083c"} Sep 30 12:23:00 crc kubenswrapper[5002]: I0930 12:23:00.889255 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rhw4l" event={"ID":"30cf48ad-b568-4511-9877-bad2837d969e","Type":"ContainerStarted","Data":"bb82e2a6a04df0d98864bd29604422f373266e0b0445628a4cb20502704bb9f4"} Sep 30 12:23:00 crc kubenswrapper[5002]: I0930 12:23:00.891000 5002 generic.go:334] "Generic (PLEG): container finished" podID="4b09a25e-9b24-4ea4-a5a2-d3155a125d36" containerID="4fe3d76f5d12be6c583b11fda0704fa02d7168d28688b217a15732085f37743e" exitCode=0 Sep 30 12:23:00 crc kubenswrapper[5002]: I0930 12:23:00.891023 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tbphh" event={"ID":"4b09a25e-9b24-4ea4-a5a2-d3155a125d36","Type":"ContainerDied","Data":"4fe3d76f5d12be6c583b11fda0704fa02d7168d28688b217a15732085f37743e"} Sep 30 12:23:00 crc kubenswrapper[5002]: I0930 12:23:00.891042 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tbphh" 
event={"ID":"4b09a25e-9b24-4ea4-a5a2-d3155a125d36","Type":"ContainerStarted","Data":"3c1bb1623e6bcab934df6ea271d1c45ef275dc6140919a5eb1c81178f751ac7d"} Sep 30 12:23:01 crc kubenswrapper[5002]: I0930 12:23:01.093424 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Sep 30 12:23:01 crc kubenswrapper[5002]: E0930 12:23:01.093625 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4de266de-62cd-4654-9083-f100649e46b7" containerName="pruner" Sep 30 12:23:01 crc kubenswrapper[5002]: I0930 12:23:01.093637 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="4de266de-62cd-4654-9083-f100649e46b7" containerName="pruner" Sep 30 12:23:01 crc kubenswrapper[5002]: I0930 12:23:01.093736 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="4de266de-62cd-4654-9083-f100649e46b7" containerName="pruner" Sep 30 12:23:01 crc kubenswrapper[5002]: I0930 12:23:01.094175 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Sep 30 12:23:01 crc kubenswrapper[5002]: I0930 12:23:01.096135 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Sep 30 12:23:01 crc kubenswrapper[5002]: I0930 12:23:01.096303 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Sep 30 12:23:01 crc kubenswrapper[5002]: I0930 12:23:01.103353 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Sep 30 12:23:01 crc kubenswrapper[5002]: I0930 12:23:01.245171 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/35381678-7a1b-4eab-9e1a-64b75b8c0eba-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"35381678-7a1b-4eab-9e1a-64b75b8c0eba\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Sep 30 12:23:01 crc kubenswrapper[5002]: I0930 12:23:01.245289 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/35381678-7a1b-4eab-9e1a-64b75b8c0eba-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"35381678-7a1b-4eab-9e1a-64b75b8c0eba\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Sep 30 12:23:01 crc kubenswrapper[5002]: I0930 12:23:01.346752 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/35381678-7a1b-4eab-9e1a-64b75b8c0eba-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"35381678-7a1b-4eab-9e1a-64b75b8c0eba\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Sep 30 12:23:01 crc kubenswrapper[5002]: I0930 12:23:01.346846 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/35381678-7a1b-4eab-9e1a-64b75b8c0eba-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"35381678-7a1b-4eab-9e1a-64b75b8c0eba\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Sep 30 12:23:01 crc kubenswrapper[5002]: I0930 12:23:01.346914 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/35381678-7a1b-4eab-9e1a-64b75b8c0eba-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"35381678-7a1b-4eab-9e1a-64b75b8c0eba\") " 
pod="openshift-kube-apiserver/revision-pruner-8-crc" Sep 30 12:23:01 crc kubenswrapper[5002]: I0930 12:23:01.365060 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/35381678-7a1b-4eab-9e1a-64b75b8c0eba-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"35381678-7a1b-4eab-9e1a-64b75b8c0eba\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Sep 30 12:23:01 crc kubenswrapper[5002]: I0930 12:23:01.413648 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Sep 30 12:23:01 crc kubenswrapper[5002]: I0930 12:23:01.898388 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Sep 30 12:23:02 crc kubenswrapper[5002]: I0930 12:23:02.098325 5002 patch_prober.go:28] interesting pod/machine-config-daemon-ncbb5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 12:23:02 crc kubenswrapper[5002]: I0930 12:23:02.098429 5002 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" podUID="341a55c6-78d3-4fa2-8f47-b56fd41fa1c1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 12:23:02 crc kubenswrapper[5002]: I0930 12:23:02.931640 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"35381678-7a1b-4eab-9e1a-64b75b8c0eba","Type":"ContainerStarted","Data":"d48a836963c9d7569d59380623c236454c4fd86f8fe75de130c0c778ae46a9e7"} Sep 30 12:23:02 crc kubenswrapper[5002]: I0930 12:23:02.931927 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"35381678-7a1b-4eab-9e1a-64b75b8c0eba","Type":"ContainerStarted","Data":"101a9509dc38f3d77aa6337096588be794e2accdd9985211bec34ddf61207735"} Sep 30 12:23:02 crc kubenswrapper[5002]: I0930 12:23:02.960849 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/revision-pruner-8-crc" podStartSLOduration=1.960833222 podStartE2EDuration="1.960833222s" podCreationTimestamp="2025-09-30 12:23:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 12:23:02.959772062 +0000 UTC m=+157.209454218" watchObservedRunningTime="2025-09-30 12:23:02.960833222 +0000 UTC m=+157.210515368" Sep 30 12:23:03 crc kubenswrapper[5002]: I0930 12:23:03.872287 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-dns/dns-default-h52ns" Sep 30 12:23:03 crc kubenswrapper[5002]: I0930 12:23:03.953849 5002 generic.go:334] "Generic (PLEG): container finished" podID="35381678-7a1b-4eab-9e1a-64b75b8c0eba" containerID="d48a836963c9d7569d59380623c236454c4fd86f8fe75de130c0c778ae46a9e7" exitCode=0 Sep 30 12:23:03 crc kubenswrapper[5002]: I0930 12:23:03.953941 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"35381678-7a1b-4eab-9e1a-64b75b8c0eba","Type":"ContainerDied","Data":"d48a836963c9d7569d59380623c236454c4fd86f8fe75de130c0c778ae46a9e7"} Sep 30 12:23:08 crc 
kubenswrapper[5002]: I0930 12:23:08.659002 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/downloads-7954f5f757-tdxkp" Sep 30 12:23:08 crc kubenswrapper[5002]: I0930 12:23:08.661576 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-f9d7485db-79sft" Sep 30 12:23:08 crc kubenswrapper[5002]: I0930 12:23:08.665034 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-f9d7485db-79sft" Sep 30 12:23:12 crc kubenswrapper[5002]: I0930 12:23:12.443711 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/68756e8a-d882-403f-acd7-2c41fce4446f-metrics-certs\") pod \"network-metrics-daemon-dj2ln\" (UID: \"68756e8a-d882-403f-acd7-2c41fce4446f\") " pod="openshift-multus/network-metrics-daemon-dj2ln" Sep 30 12:23:12 crc kubenswrapper[5002]: I0930 12:23:12.452404 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/68756e8a-d882-403f-acd7-2c41fce4446f-metrics-certs\") pod \"network-metrics-daemon-dj2ln\" (UID: \"68756e8a-d882-403f-acd7-2c41fce4446f\") " pod="openshift-multus/network-metrics-daemon-dj2ln" Sep 30 12:23:12 crc kubenswrapper[5002]: I0930 12:23:12.695722 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-dj2ln" Sep 30 12:23:16 crc kubenswrapper[5002]: I0930 12:23:16.028727 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Sep 30 12:23:16 crc kubenswrapper[5002]: I0930 12:23:16.057925 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"35381678-7a1b-4eab-9e1a-64b75b8c0eba","Type":"ContainerDied","Data":"101a9509dc38f3d77aa6337096588be794e2accdd9985211bec34ddf61207735"} Sep 30 12:23:16 crc kubenswrapper[5002]: I0930 12:23:16.057981 5002 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="101a9509dc38f3d77aa6337096588be794e2accdd9985211bec34ddf61207735" Sep 30 12:23:16 crc kubenswrapper[5002]: I0930 12:23:16.058053 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Sep 30 12:23:16 crc kubenswrapper[5002]: I0930 12:23:16.174633 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-697d97f7c8-zfcl9" Sep 30 12:23:16 crc kubenswrapper[5002]: I0930 12:23:16.208938 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/35381678-7a1b-4eab-9e1a-64b75b8c0eba-kube-api-access\") pod \"35381678-7a1b-4eab-9e1a-64b75b8c0eba\" (UID: \"35381678-7a1b-4eab-9e1a-64b75b8c0eba\") " Sep 30 12:23:16 crc kubenswrapper[5002]: I0930 12:23:16.209118 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/35381678-7a1b-4eab-9e1a-64b75b8c0eba-kubelet-dir\") pod \"35381678-7a1b-4eab-9e1a-64b75b8c0eba\" (UID: \"35381678-7a1b-4eab-9e1a-64b75b8c0eba\") " Sep 30 12:23:16 crc kubenswrapper[5002]: I0930 12:23:16.209557 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/35381678-7a1b-4eab-9e1a-64b75b8c0eba-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "35381678-7a1b-4eab-9e1a-64b75b8c0eba" (UID: "35381678-7a1b-4eab-9e1a-64b75b8c0eba"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 12:23:16 crc kubenswrapper[5002]: I0930 12:23:16.218717 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/35381678-7a1b-4eab-9e1a-64b75b8c0eba-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "35381678-7a1b-4eab-9e1a-64b75b8c0eba" (UID: "35381678-7a1b-4eab-9e1a-64b75b8c0eba"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 12:23:16 crc kubenswrapper[5002]: I0930 12:23:16.310946 5002 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/35381678-7a1b-4eab-9e1a-64b75b8c0eba-kubelet-dir\") on node \"crc\" DevicePath \"\"" Sep 30 12:23:16 crc kubenswrapper[5002]: I0930 12:23:16.311340 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/35381678-7a1b-4eab-9e1a-64b75b8c0eba-kube-api-access\") on node \"crc\" DevicePath \"\"" Sep 30 12:23:25 crc kubenswrapper[5002]: I0930 12:23:25.057841 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 12:23:27 crc kubenswrapper[5002]: E0930 12:23:27.274094 5002 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18" Sep 30 12:23:27 crc kubenswrapper[5002]: E0930 12:23:27.274839 5002 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-rf2nr,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-ccfzv_openshift-marketplace(1c74574a-77fc-4f9f-93ff-ff2b64f27312): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Sep 30 12:23:27 crc kubenswrapper[5002]: E0930 12:23:27.276237 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-ccfzv" podUID="1c74574a-77fc-4f9f-93ff-ff2b64f27312" Sep 30 12:23:27 crc kubenswrapper[5002]: E0930 12:23:27.936422 5002 log.go:32] "PullImage from image service failed" err="rpc error: code = 
Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18" Sep 30 12:23:27 crc kubenswrapper[5002]: E0930 12:23:27.936586 5002 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-vw9t5,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-fr4sd_openshift-marketplace(7a6520d7-b4d9-4837-a574-48cb86ee6231): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Sep 30 12:23:27 crc kubenswrapper[5002]: E0930 12:23:27.938040 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-fr4sd" podUID="7a6520d7-b4d9-4837-a574-48cb86ee6231" Sep 30 12:23:29 crc kubenswrapper[5002]: E0930 12:23:29.046957 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-ccfzv" podUID="1c74574a-77fc-4f9f-93ff-ff2b64f27312" Sep 30 12:23:29 crc kubenswrapper[5002]: E0930 12:23:29.049100 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-fr4sd" podUID="7a6520d7-b4d9-4837-a574-48cb86ee6231" Sep 30 12:23:29 crc kubenswrapper[5002]: I0930 12:23:29.054926 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-2jxnc" Sep 30 12:23:29 crc kubenswrapper[5002]: E0930 12:23:29.117112 
5002 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18" Sep 30 12:23:29 crc kubenswrapper[5002]: E0930 12:23:29.117259 5002 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-rxj5m,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-7bvwm_openshift-marketplace(c2314206-9216-42a7-a942-5be7b30543be): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Sep 30 12:23:29 crc kubenswrapper[5002]: E0930 12:23:29.119012 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-7bvwm" podUID="c2314206-9216-42a7-a942-5be7b30543be" Sep 30 12:23:31 crc kubenswrapper[5002]: E0930 12:23:31.666822 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-7bvwm" podUID="c2314206-9216-42a7-a942-5be7b30543be" Sep 30 12:23:31 crc kubenswrapper[5002]: E0930 12:23:31.807965 5002 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-operator-index:v4.18" Sep 30 12:23:31 crc kubenswrapper[5002]: E0930 12:23:31.808330 5002 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs 
--catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-9vglg,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-operators-rhw4l_openshift-marketplace(30cf48ad-b568-4511-9877-bad2837d969e): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Sep 30 12:23:31 crc kubenswrapper[5002]: E0930 12:23:31.810000 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-operators-rhw4l" podUID="30cf48ad-b568-4511-9877-bad2837d969e" Sep 30 12:23:31 crc kubenswrapper[5002]: E0930 12:23:31.854820 5002 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18" Sep 30 12:23:31 crc kubenswrapper[5002]: E0930 12:23:31.854972 5002 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-jkqtp,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-9vlx8_openshift-marketplace(e5ef4b78-d57a-4e71-b275-ac5a2ca7ab0b): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Sep 30 12:23:31 crc kubenswrapper[5002]: E0930 12:23:31.856682 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-9vlx8" podUID="e5ef4b78-d57a-4e71-b275-ac5a2ca7ab0b" Sep 30 12:23:31 crc kubenswrapper[5002]: E0930 12:23:31.890116 5002 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18" Sep 30 12:23:31 crc kubenswrapper[5002]: E0930 12:23:31.890283 5002 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-4mplt,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-5w8h8_openshift-marketplace(00884b54-ca12-4a91-a28b-fcd78a870b68): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Sep 30 12:23:31 crc kubenswrapper[5002]: E0930 12:23:31.891609 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-5w8h8" podUID="00884b54-ca12-4a91-a28b-fcd78a870b68" Sep 30 12:23:32 crc kubenswrapper[5002]: I0930 12:23:32.099262 5002 patch_prober.go:28] interesting pod/machine-config-daemon-ncbb5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 12:23:32 crc kubenswrapper[5002]: I0930 12:23:32.099627 5002 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" podUID="341a55c6-78d3-4fa2-8f47-b56fd41fa1c1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 12:23:32 crc kubenswrapper[5002]: I0930 12:23:32.143621 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tbphh" event={"ID":"4b09a25e-9b24-4ea4-a5a2-d3155a125d36","Type":"ContainerStarted","Data":"ebe5b5e2cfe6cad13ee7f9783b76e5f460498787830ca2b5a6ac35f958f63e29"} Sep 30 12:23:32 crc kubenswrapper[5002]: I0930 12:23:32.148662 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-tbfr4" event={"ID":"78cdb7b1-d7cc-4c42-b2c0-7cd3e4f6c559","Type":"ContainerStarted","Data":"3eb07b36032c20ac9673490e749f8f131a224903b3c4710636680282fe13ddc7"} Sep 30 12:23:32 crc kubenswrapper[5002]: E0930 12:23:32.149539 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with 
ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-9vlx8" podUID="e5ef4b78-d57a-4e71-b275-ac5a2ca7ab0b" Sep 30 12:23:32 crc kubenswrapper[5002]: E0930 12:23:32.149741 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-5w8h8" podUID="00884b54-ca12-4a91-a28b-fcd78a870b68" Sep 30 12:23:32 crc kubenswrapper[5002]: E0930 12:23:32.151499 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-operators-rhw4l" podUID="30cf48ad-b568-4511-9877-bad2837d969e" Sep 30 12:23:32 crc kubenswrapper[5002]: I0930 12:23:32.183794 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-dj2ln"] Sep 30 12:23:32 crc kubenswrapper[5002]: W0930 12:23:32.192850 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod68756e8a_d882_403f_acd7_2c41fce4446f.slice/crio-c4d3523981310b3d551189501aeab82617371e457f490b371201f4fba1469b36 WatchSource:0}: Error finding container c4d3523981310b3d551189501aeab82617371e457f490b371201f4fba1469b36: Status 404 returned error can't find the container with id c4d3523981310b3d551189501aeab82617371e457f490b371201f4fba1469b36 Sep 30 12:23:33 crc kubenswrapper[5002]: I0930 12:23:33.155985 5002 generic.go:334] "Generic (PLEG): container finished" podID="78cdb7b1-d7cc-4c42-b2c0-7cd3e4f6c559" containerID="3eb07b36032c20ac9673490e749f8f131a224903b3c4710636680282fe13ddc7" exitCode=0 Sep 30 12:23:33 crc kubenswrapper[5002]: I0930 12:23:33.156064 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-tbfr4" event={"ID":"78cdb7b1-d7cc-4c42-b2c0-7cd3e4f6c559","Type":"ContainerDied","Data":"3eb07b36032c20ac9673490e749f8f131a224903b3c4710636680282fe13ddc7"} Sep 30 12:23:33 crc kubenswrapper[5002]: I0930 12:23:33.157919 5002 generic.go:334] "Generic (PLEG): container finished" podID="4b09a25e-9b24-4ea4-a5a2-d3155a125d36" containerID="ebe5b5e2cfe6cad13ee7f9783b76e5f460498787830ca2b5a6ac35f958f63e29" exitCode=0 Sep 30 12:23:33 crc kubenswrapper[5002]: I0930 12:23:33.158156 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tbphh" event={"ID":"4b09a25e-9b24-4ea4-a5a2-d3155a125d36","Type":"ContainerDied","Data":"ebe5b5e2cfe6cad13ee7f9783b76e5f460498787830ca2b5a6ac35f958f63e29"} Sep 30 12:23:33 crc kubenswrapper[5002]: I0930 12:23:33.163893 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-dj2ln" event={"ID":"68756e8a-d882-403f-acd7-2c41fce4446f","Type":"ContainerStarted","Data":"7a18d05df600f76b14f52f42c88313b22bcf8643454d8048b282dd525bfcbb92"} Sep 30 12:23:33 crc kubenswrapper[5002]: I0930 12:23:33.164007 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-dj2ln" event={"ID":"68756e8a-d882-403f-acd7-2c41fce4446f","Type":"ContainerStarted","Data":"d25d24e36d4905d4a02eb8def6535b78fe87f09f87c2095d8a4cf0458d773f44"} Sep 30 12:23:33 crc kubenswrapper[5002]: I0930 
12:23:33.164075 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-dj2ln" event={"ID":"68756e8a-d882-403f-acd7-2c41fce4446f","Type":"ContainerStarted","Data":"c4d3523981310b3d551189501aeab82617371e457f490b371201f4fba1469b36"} Sep 30 12:23:33 crc kubenswrapper[5002]: I0930 12:23:33.212840 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/network-metrics-daemon-dj2ln" podStartSLOduration=164.212754924 podStartE2EDuration="2m44.212754924s" podCreationTimestamp="2025-09-30 12:20:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 12:23:33.207694962 +0000 UTC m=+187.457377108" watchObservedRunningTime="2025-09-30 12:23:33.212754924 +0000 UTC m=+187.462437140" Sep 30 12:23:34 crc kubenswrapper[5002]: I0930 12:23:34.181083 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-tbfr4" event={"ID":"78cdb7b1-d7cc-4c42-b2c0-7cd3e4f6c559","Type":"ContainerStarted","Data":"1333e276a97fc8b87443d3fa70edc1ff2ed898a3d5d12ea07101ea06b9b51c12"} Sep 30 12:23:34 crc kubenswrapper[5002]: I0930 12:23:34.211419 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-tbfr4" podStartSLOduration=2.071099361 podStartE2EDuration="38.211392536s" podCreationTimestamp="2025-09-30 12:22:56 +0000 UTC" firstStartedPulling="2025-09-30 12:22:57.825934154 +0000 UTC m=+152.075616300" lastFinishedPulling="2025-09-30 12:23:33.966227329 +0000 UTC m=+188.215909475" observedRunningTime="2025-09-30 12:23:34.207304762 +0000 UTC m=+188.456986948" watchObservedRunningTime="2025-09-30 12:23:34.211392536 +0000 UTC m=+188.461074722" Sep 30 12:23:35 crc kubenswrapper[5002]: I0930 12:23:35.190652 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tbphh" event={"ID":"4b09a25e-9b24-4ea4-a5a2-d3155a125d36","Type":"ContainerStarted","Data":"a98d24025d3fd640dc06d78afd30b9cff09fbb8bea14f474b2f1374d8a0af5aa"} Sep 30 12:23:36 crc kubenswrapper[5002]: I0930 12:23:36.746902 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-tbfr4" Sep 30 12:23:36 crc kubenswrapper[5002]: I0930 12:23:36.747898 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-tbfr4" Sep 30 12:23:37 crc kubenswrapper[5002]: I0930 12:23:37.892831 5002 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/certified-operators-tbfr4" podUID="78cdb7b1-d7cc-4c42-b2c0-7cd3e4f6c559" containerName="registry-server" probeResult="failure" output=< Sep 30 12:23:37 crc kubenswrapper[5002]: timeout: failed to connect service ":50051" within 1s Sep 30 12:23:37 crc kubenswrapper[5002]: > Sep 30 12:23:39 crc kubenswrapper[5002]: I0930 12:23:39.965874 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-tbphh" Sep 30 12:23:39 crc kubenswrapper[5002]: I0930 12:23:39.965945 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-tbphh" Sep 30 12:23:41 crc kubenswrapper[5002]: I0930 12:23:41.007113 5002 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-tbphh" podUID="4b09a25e-9b24-4ea4-a5a2-d3155a125d36" containerName="registry-server" 
probeResult="failure" output=< Sep 30 12:23:41 crc kubenswrapper[5002]: timeout: failed to connect service ":50051" within 1s Sep 30 12:23:41 crc kubenswrapper[5002]: > Sep 30 12:23:41 crc kubenswrapper[5002]: I0930 12:23:41.707566 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-tbphh" podStartSLOduration=9.489811462 podStartE2EDuration="42.707546668s" podCreationTimestamp="2025-09-30 12:22:59 +0000 UTC" firstStartedPulling="2025-09-30 12:23:00.892350329 +0000 UTC m=+155.142032475" lastFinishedPulling="2025-09-30 12:23:34.110085515 +0000 UTC m=+188.359767681" observedRunningTime="2025-09-30 12:23:35.2147066 +0000 UTC m=+189.464388756" watchObservedRunningTime="2025-09-30 12:23:41.707546668 +0000 UTC m=+195.957228834" Sep 30 12:23:43 crc kubenswrapper[5002]: I0930 12:23:43.242132 5002 generic.go:334] "Generic (PLEG): container finished" podID="1c74574a-77fc-4f9f-93ff-ff2b64f27312" containerID="404b2974489712b3805ca6fb2670926ce35f8948e54abd0f22d6f852e6fbde40" exitCode=0 Sep 30 12:23:43 crc kubenswrapper[5002]: I0930 12:23:43.242352 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-ccfzv" event={"ID":"1c74574a-77fc-4f9f-93ff-ff2b64f27312","Type":"ContainerDied","Data":"404b2974489712b3805ca6fb2670926ce35f8948e54abd0f22d6f852e6fbde40"} Sep 30 12:23:46 crc kubenswrapper[5002]: I0930 12:23:46.841374 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-tbfr4" Sep 30 12:23:46 crc kubenswrapper[5002]: I0930 12:23:46.893650 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-tbfr4" Sep 30 12:23:47 crc kubenswrapper[5002]: I0930 12:23:47.924585 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-tbfr4"] Sep 30 12:23:48 crc kubenswrapper[5002]: I0930 12:23:48.272577 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-tbfr4" podUID="78cdb7b1-d7cc-4c42-b2c0-7cd3e4f6c559" containerName="registry-server" containerID="cri-o://1333e276a97fc8b87443d3fa70edc1ff2ed898a3d5d12ea07101ea06b9b51c12" gracePeriod=2 Sep 30 12:23:48 crc kubenswrapper[5002]: E0930 12:23:48.513768 5002 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod78cdb7b1_d7cc_4c42_b2c0_7cd3e4f6c559.slice/crio-1333e276a97fc8b87443d3fa70edc1ff2ed898a3d5d12ea07101ea06b9b51c12.scope\": RecentStats: unable to find data in memory cache]" Sep 30 12:23:49 crc kubenswrapper[5002]: I0930 12:23:49.284549 5002 generic.go:334] "Generic (PLEG): container finished" podID="78cdb7b1-d7cc-4c42-b2c0-7cd3e4f6c559" containerID="1333e276a97fc8b87443d3fa70edc1ff2ed898a3d5d12ea07101ea06b9b51c12" exitCode=0 Sep 30 12:23:49 crc kubenswrapper[5002]: I0930 12:23:49.284669 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-tbfr4" event={"ID":"78cdb7b1-d7cc-4c42-b2c0-7cd3e4f6c559","Type":"ContainerDied","Data":"1333e276a97fc8b87443d3fa70edc1ff2ed898a3d5d12ea07101ea06b9b51c12"} Sep 30 12:23:50 crc kubenswrapper[5002]: I0930 12:23:50.049467 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-tbphh" Sep 30 12:23:50 crc kubenswrapper[5002]: I0930 12:23:50.104539 5002 
kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-tbphh" Sep 30 12:23:51 crc kubenswrapper[5002]: I0930 12:23:51.012679 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-tbfr4" Sep 30 12:23:51 crc kubenswrapper[5002]: I0930 12:23:51.203999 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-g6pdc\" (UniqueName: \"kubernetes.io/projected/78cdb7b1-d7cc-4c42-b2c0-7cd3e4f6c559-kube-api-access-g6pdc\") pod \"78cdb7b1-d7cc-4c42-b2c0-7cd3e4f6c559\" (UID: \"78cdb7b1-d7cc-4c42-b2c0-7cd3e4f6c559\") " Sep 30 12:23:51 crc kubenswrapper[5002]: I0930 12:23:51.204067 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/78cdb7b1-d7cc-4c42-b2c0-7cd3e4f6c559-catalog-content\") pod \"78cdb7b1-d7cc-4c42-b2c0-7cd3e4f6c559\" (UID: \"78cdb7b1-d7cc-4c42-b2c0-7cd3e4f6c559\") " Sep 30 12:23:51 crc kubenswrapper[5002]: I0930 12:23:51.204141 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/78cdb7b1-d7cc-4c42-b2c0-7cd3e4f6c559-utilities\") pod \"78cdb7b1-d7cc-4c42-b2c0-7cd3e4f6c559\" (UID: \"78cdb7b1-d7cc-4c42-b2c0-7cd3e4f6c559\") " Sep 30 12:23:51 crc kubenswrapper[5002]: I0930 12:23:51.204897 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/78cdb7b1-d7cc-4c42-b2c0-7cd3e4f6c559-utilities" (OuterVolumeSpecName: "utilities") pod "78cdb7b1-d7cc-4c42-b2c0-7cd3e4f6c559" (UID: "78cdb7b1-d7cc-4c42-b2c0-7cd3e4f6c559"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 12:23:51 crc kubenswrapper[5002]: I0930 12:23:51.205024 5002 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/78cdb7b1-d7cc-4c42-b2c0-7cd3e4f6c559-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 12:23:51 crc kubenswrapper[5002]: I0930 12:23:51.209156 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/78cdb7b1-d7cc-4c42-b2c0-7cd3e4f6c559-kube-api-access-g6pdc" (OuterVolumeSpecName: "kube-api-access-g6pdc") pod "78cdb7b1-d7cc-4c42-b2c0-7cd3e4f6c559" (UID: "78cdb7b1-d7cc-4c42-b2c0-7cd3e4f6c559"). InnerVolumeSpecName "kube-api-access-g6pdc". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 12:23:51 crc kubenswrapper[5002]: I0930 12:23:51.250890 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/78cdb7b1-d7cc-4c42-b2c0-7cd3e4f6c559-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "78cdb7b1-d7cc-4c42-b2c0-7cd3e4f6c559" (UID: "78cdb7b1-d7cc-4c42-b2c0-7cd3e4f6c559"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 12:23:51 crc kubenswrapper[5002]: I0930 12:23:51.295111 5002 generic.go:334] "Generic (PLEG): container finished" podID="e5ef4b78-d57a-4e71-b275-ac5a2ca7ab0b" containerID="b63020eede55c46ec02342f4dd863fe7e29d2033fe8b9f29110aae6e6e83b4aa" exitCode=0 Sep 30 12:23:51 crc kubenswrapper[5002]: I0930 12:23:51.295182 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-9vlx8" event={"ID":"e5ef4b78-d57a-4e71-b275-ac5a2ca7ab0b","Type":"ContainerDied","Data":"b63020eede55c46ec02342f4dd863fe7e29d2033fe8b9f29110aae6e6e83b4aa"} Sep 30 12:23:51 crc kubenswrapper[5002]: I0930 12:23:51.297105 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-ccfzv" event={"ID":"1c74574a-77fc-4f9f-93ff-ff2b64f27312","Type":"ContainerStarted","Data":"22e6d1ee232fc55ac08b464dd995aee3fdc0646bde92ee9db0508e19a4a0aa1b"} Sep 30 12:23:51 crc kubenswrapper[5002]: I0930 12:23:51.302679 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7bvwm" event={"ID":"c2314206-9216-42a7-a942-5be7b30543be","Type":"ContainerStarted","Data":"80002f93f344ea44b033c0523331ba9ca7937ed19573f388abe3db5c12994984"} Sep 30 12:23:51 crc kubenswrapper[5002]: I0930 12:23:51.304267 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rhw4l" event={"ID":"30cf48ad-b568-4511-9877-bad2837d969e","Type":"ContainerStarted","Data":"d496999c76eb95bc5eaece602eb6f027a188cc9286e72cb0c7fef03de38af16b"} Sep 30 12:23:51 crc kubenswrapper[5002]: I0930 12:23:51.305449 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-g6pdc\" (UniqueName: \"kubernetes.io/projected/78cdb7b1-d7cc-4c42-b2c0-7cd3e4f6c559-kube-api-access-g6pdc\") on node \"crc\" DevicePath \"\"" Sep 30 12:23:51 crc kubenswrapper[5002]: I0930 12:23:51.305481 5002 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/78cdb7b1-d7cc-4c42-b2c0-7cd3e4f6c559-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 12:23:51 crc kubenswrapper[5002]: I0930 12:23:51.305826 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-tbfr4" Sep 30 12:23:51 crc kubenswrapper[5002]: I0930 12:23:51.305921 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-tbfr4" event={"ID":"78cdb7b1-d7cc-4c42-b2c0-7cd3e4f6c559","Type":"ContainerDied","Data":"ea4df70ac60be647b1b7f059902d1318c1abcd554d7b34f0bbdc30e230dd4fd2"} Sep 30 12:23:51 crc kubenswrapper[5002]: I0930 12:23:51.305984 5002 scope.go:117] "RemoveContainer" containerID="1333e276a97fc8b87443d3fa70edc1ff2ed898a3d5d12ea07101ea06b9b51c12" Sep 30 12:23:51 crc kubenswrapper[5002]: I0930 12:23:51.307128 5002 generic.go:334] "Generic (PLEG): container finished" podID="7a6520d7-b4d9-4837-a574-48cb86ee6231" containerID="c6e34ba553b4dfedb6f9ef4977e4877cf1241bebf7a2873dee2f74f23eafee97" exitCode=0 Sep 30 12:23:51 crc kubenswrapper[5002]: I0930 12:23:51.307148 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-fr4sd" event={"ID":"7a6520d7-b4d9-4837-a574-48cb86ee6231","Type":"ContainerDied","Data":"c6e34ba553b4dfedb6f9ef4977e4877cf1241bebf7a2873dee2f74f23eafee97"} Sep 30 12:23:51 crc kubenswrapper[5002]: I0930 12:23:51.323291 5002 scope.go:117] "RemoveContainer" containerID="3eb07b36032c20ac9673490e749f8f131a224903b3c4710636680282fe13ddc7" Sep 30 12:23:51 crc kubenswrapper[5002]: I0930 12:23:51.343404 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-ccfzv" podStartSLOduration=3.341038932 podStartE2EDuration="56.343377782s" podCreationTimestamp="2025-09-30 12:22:55 +0000 UTC" firstStartedPulling="2025-09-30 12:22:57.804462462 +0000 UTC m=+152.054144608" lastFinishedPulling="2025-09-30 12:23:50.806801312 +0000 UTC m=+205.056483458" observedRunningTime="2025-09-30 12:23:51.337733808 +0000 UTC m=+205.587415974" watchObservedRunningTime="2025-09-30 12:23:51.343377782 +0000 UTC m=+205.593059928" Sep 30 12:23:51 crc kubenswrapper[5002]: I0930 12:23:51.382027 5002 scope.go:117] "RemoveContainer" containerID="6ef344b3df2e43ae4b811e90d577522e10ff355b9390c6b085c0fe122c505d3e" Sep 30 12:23:51 crc kubenswrapper[5002]: I0930 12:23:51.425064 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-tbfr4"] Sep 30 12:23:51 crc kubenswrapper[5002]: I0930 12:23:51.428754 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-tbfr4"] Sep 30 12:23:51 crc kubenswrapper[5002]: I0930 12:23:51.921334 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-tbphh"] Sep 30 12:23:51 crc kubenswrapper[5002]: I0930 12:23:51.922173 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-tbphh" podUID="4b09a25e-9b24-4ea4-a5a2-d3155a125d36" containerName="registry-server" containerID="cri-o://a98d24025d3fd640dc06d78afd30b9cff09fbb8bea14f474b2f1374d8a0af5aa" gracePeriod=2 Sep 30 12:23:52 crc kubenswrapper[5002]: I0930 12:23:52.315010 5002 generic.go:334] "Generic (PLEG): container finished" podID="30cf48ad-b568-4511-9877-bad2837d969e" containerID="d496999c76eb95bc5eaece602eb6f027a188cc9286e72cb0c7fef03de38af16b" exitCode=0 Sep 30 12:23:52 crc kubenswrapper[5002]: I0930 12:23:52.315088 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rhw4l" 
event={"ID":"30cf48ad-b568-4511-9877-bad2837d969e","Type":"ContainerDied","Data":"d496999c76eb95bc5eaece602eb6f027a188cc9286e72cb0c7fef03de38af16b"} Sep 30 12:23:52 crc kubenswrapper[5002]: I0930 12:23:52.321299 5002 generic.go:334] "Generic (PLEG): container finished" podID="4b09a25e-9b24-4ea4-a5a2-d3155a125d36" containerID="a98d24025d3fd640dc06d78afd30b9cff09fbb8bea14f474b2f1374d8a0af5aa" exitCode=0 Sep 30 12:23:52 crc kubenswrapper[5002]: I0930 12:23:52.321406 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tbphh" event={"ID":"4b09a25e-9b24-4ea4-a5a2-d3155a125d36","Type":"ContainerDied","Data":"a98d24025d3fd640dc06d78afd30b9cff09fbb8bea14f474b2f1374d8a0af5aa"} Sep 30 12:23:52 crc kubenswrapper[5002]: I0930 12:23:52.324015 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-fr4sd" event={"ID":"7a6520d7-b4d9-4837-a574-48cb86ee6231","Type":"ContainerStarted","Data":"607c13ea530d1231d27bd3a7b78bc5138fdfed7e6787463c85338519c1f8a3ab"} Sep 30 12:23:52 crc kubenswrapper[5002]: I0930 12:23:52.326338 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-9vlx8" event={"ID":"e5ef4b78-d57a-4e71-b275-ac5a2ca7ab0b","Type":"ContainerStarted","Data":"0dd90e115e7c0af7f2b7242c1b9d45a7b65b608d6ecb686eb1fad2a7b6d95013"} Sep 30 12:23:52 crc kubenswrapper[5002]: I0930 12:23:52.327958 5002 generic.go:334] "Generic (PLEG): container finished" podID="00884b54-ca12-4a91-a28b-fcd78a870b68" containerID="84fc9e2b3a219e719c69528be80b2d8684add00db31fc5cd69d9bca958ce049d" exitCode=0 Sep 30 12:23:52 crc kubenswrapper[5002]: I0930 12:23:52.328024 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-5w8h8" event={"ID":"00884b54-ca12-4a91-a28b-fcd78a870b68","Type":"ContainerDied","Data":"84fc9e2b3a219e719c69528be80b2d8684add00db31fc5cd69d9bca958ce049d"} Sep 30 12:23:52 crc kubenswrapper[5002]: I0930 12:23:52.332623 5002 generic.go:334] "Generic (PLEG): container finished" podID="c2314206-9216-42a7-a942-5be7b30543be" containerID="80002f93f344ea44b033c0523331ba9ca7937ed19573f388abe3db5c12994984" exitCode=0 Sep 30 12:23:52 crc kubenswrapper[5002]: I0930 12:23:52.332644 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7bvwm" event={"ID":"c2314206-9216-42a7-a942-5be7b30543be","Type":"ContainerDied","Data":"80002f93f344ea44b033c0523331ba9ca7937ed19573f388abe3db5c12994984"} Sep 30 12:23:52 crc kubenswrapper[5002]: I0930 12:23:52.409094 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-fr4sd" podStartSLOduration=2.444361635 podStartE2EDuration="54.409073854s" podCreationTimestamp="2025-09-30 12:22:58 +0000 UTC" firstStartedPulling="2025-09-30 12:22:59.874393395 +0000 UTC m=+154.124075551" lastFinishedPulling="2025-09-30 12:23:51.839105594 +0000 UTC m=+206.088787770" observedRunningTime="2025-09-30 12:23:52.391978157 +0000 UTC m=+206.641660333" watchObservedRunningTime="2025-09-30 12:23:52.409073854 +0000 UTC m=+206.658756000" Sep 30 12:23:52 crc kubenswrapper[5002]: I0930 12:23:52.411529 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-9vlx8" podStartSLOduration=2.573044547 podStartE2EDuration="54.41151856s" podCreationTimestamp="2025-09-30 12:22:58 +0000 UTC" firstStartedPulling="2025-09-30 12:22:59.872863572 +0000 UTC m=+154.122545718" 
lastFinishedPulling="2025-09-30 12:23:51.711337585 +0000 UTC m=+205.961019731" observedRunningTime="2025-09-30 12:23:52.408841887 +0000 UTC m=+206.658524023" watchObservedRunningTime="2025-09-30 12:23:52.41151856 +0000 UTC m=+206.661200706" Sep 30 12:23:52 crc kubenswrapper[5002]: I0930 12:23:52.687727 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="78cdb7b1-d7cc-4c42-b2c0-7cd3e4f6c559" path="/var/lib/kubelet/pods/78cdb7b1-d7cc-4c42-b2c0-7cd3e4f6c559/volumes" Sep 30 12:23:52 crc kubenswrapper[5002]: I0930 12:23:52.777752 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-tbphh" Sep 30 12:23:52 crc kubenswrapper[5002]: I0930 12:23:52.930609 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4b09a25e-9b24-4ea4-a5a2-d3155a125d36-utilities\") pod \"4b09a25e-9b24-4ea4-a5a2-d3155a125d36\" (UID: \"4b09a25e-9b24-4ea4-a5a2-d3155a125d36\") " Sep 30 12:23:52 crc kubenswrapper[5002]: I0930 12:23:52.930964 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mghzq\" (UniqueName: \"kubernetes.io/projected/4b09a25e-9b24-4ea4-a5a2-d3155a125d36-kube-api-access-mghzq\") pod \"4b09a25e-9b24-4ea4-a5a2-d3155a125d36\" (UID: \"4b09a25e-9b24-4ea4-a5a2-d3155a125d36\") " Sep 30 12:23:52 crc kubenswrapper[5002]: I0930 12:23:52.931002 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4b09a25e-9b24-4ea4-a5a2-d3155a125d36-catalog-content\") pod \"4b09a25e-9b24-4ea4-a5a2-d3155a125d36\" (UID: \"4b09a25e-9b24-4ea4-a5a2-d3155a125d36\") " Sep 30 12:23:52 crc kubenswrapper[5002]: I0930 12:23:52.932520 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4b09a25e-9b24-4ea4-a5a2-d3155a125d36-utilities" (OuterVolumeSpecName: "utilities") pod "4b09a25e-9b24-4ea4-a5a2-d3155a125d36" (UID: "4b09a25e-9b24-4ea4-a5a2-d3155a125d36"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 12:23:52 crc kubenswrapper[5002]: I0930 12:23:52.943656 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4b09a25e-9b24-4ea4-a5a2-d3155a125d36-kube-api-access-mghzq" (OuterVolumeSpecName: "kube-api-access-mghzq") pod "4b09a25e-9b24-4ea4-a5a2-d3155a125d36" (UID: "4b09a25e-9b24-4ea4-a5a2-d3155a125d36"). InnerVolumeSpecName "kube-api-access-mghzq". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 12:23:53 crc kubenswrapper[5002]: I0930 12:23:53.027699 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4b09a25e-9b24-4ea4-a5a2-d3155a125d36-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "4b09a25e-9b24-4ea4-a5a2-d3155a125d36" (UID: "4b09a25e-9b24-4ea4-a5a2-d3155a125d36"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 12:23:53 crc kubenswrapper[5002]: I0930 12:23:53.032827 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mghzq\" (UniqueName: \"kubernetes.io/projected/4b09a25e-9b24-4ea4-a5a2-d3155a125d36-kube-api-access-mghzq\") on node \"crc\" DevicePath \"\"" Sep 30 12:23:53 crc kubenswrapper[5002]: I0930 12:23:53.032858 5002 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4b09a25e-9b24-4ea4-a5a2-d3155a125d36-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 12:23:53 crc kubenswrapper[5002]: I0930 12:23:53.032868 5002 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4b09a25e-9b24-4ea4-a5a2-d3155a125d36-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 12:23:53 crc kubenswrapper[5002]: I0930 12:23:53.339071 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tbphh" event={"ID":"4b09a25e-9b24-4ea4-a5a2-d3155a125d36","Type":"ContainerDied","Data":"3c1bb1623e6bcab934df6ea271d1c45ef275dc6140919a5eb1c81178f751ac7d"} Sep 30 12:23:53 crc kubenswrapper[5002]: I0930 12:23:53.339104 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-tbphh" Sep 30 12:23:53 crc kubenswrapper[5002]: I0930 12:23:53.339368 5002 scope.go:117] "RemoveContainer" containerID="a98d24025d3fd640dc06d78afd30b9cff09fbb8bea14f474b2f1374d8a0af5aa" Sep 30 12:23:53 crc kubenswrapper[5002]: I0930 12:23:53.341912 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-5w8h8" event={"ID":"00884b54-ca12-4a91-a28b-fcd78a870b68","Type":"ContainerStarted","Data":"3d9aa45d8dfc0b444319a64ff324caf2387c6fe5026294502cd422f5962386b7"} Sep 30 12:23:53 crc kubenswrapper[5002]: I0930 12:23:53.344152 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7bvwm" event={"ID":"c2314206-9216-42a7-a942-5be7b30543be","Type":"ContainerStarted","Data":"e4b682e03bd32642f952d44a9185cc200f4494cb7ced4ffd87dded1ef1acb6aa"} Sep 30 12:23:53 crc kubenswrapper[5002]: I0930 12:23:53.346206 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rhw4l" event={"ID":"30cf48ad-b568-4511-9877-bad2837d969e","Type":"ContainerStarted","Data":"477e0350dfa79121d49c81cf03f6c77e4d09b78d1438b5663732eef0e250f271"} Sep 30 12:23:53 crc kubenswrapper[5002]: I0930 12:23:53.375167 5002 scope.go:117] "RemoveContainer" containerID="ebe5b5e2cfe6cad13ee7f9783b76e5f460498787830ca2b5a6ac35f958f63e29" Sep 30 12:23:53 crc kubenswrapper[5002]: I0930 12:23:53.378300 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-5w8h8" podStartSLOduration=2.404922315 podStartE2EDuration="57.378282691s" podCreationTimestamp="2025-09-30 12:22:56 +0000 UTC" firstStartedPulling="2025-09-30 12:22:57.803165446 +0000 UTC m=+152.052847602" lastFinishedPulling="2025-09-30 12:23:52.776525832 +0000 UTC m=+207.026207978" observedRunningTime="2025-09-30 12:23:53.377704764 +0000 UTC m=+207.627386940" watchObservedRunningTime="2025-09-30 12:23:53.378282691 +0000 UTC m=+207.627964847" Sep 30 12:23:53 crc kubenswrapper[5002]: I0930 12:23:53.392848 5002 scope.go:117] "RemoveContainer" containerID="4fe3d76f5d12be6c583b11fda0704fa02d7168d28688b217a15732085f37743e" Sep 30 12:23:53 crc 
kubenswrapper[5002]: I0930 12:23:53.401310 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-7bvwm" podStartSLOduration=2.460057355 podStartE2EDuration="57.401286361s" podCreationTimestamp="2025-09-30 12:22:56 +0000 UTC" firstStartedPulling="2025-09-30 12:22:57.821190821 +0000 UTC m=+152.070872967" lastFinishedPulling="2025-09-30 12:23:52.762419817 +0000 UTC m=+207.012101973" observedRunningTime="2025-09-30 12:23:53.395791011 +0000 UTC m=+207.645473197" watchObservedRunningTime="2025-09-30 12:23:53.401286361 +0000 UTC m=+207.650968527" Sep 30 12:23:53 crc kubenswrapper[5002]: I0930 12:23:53.415586 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-rhw4l" podStartSLOduration=2.439792356 podStartE2EDuration="54.415564511s" podCreationTimestamp="2025-09-30 12:22:59 +0000 UTC" firstStartedPulling="2025-09-30 12:23:00.892548026 +0000 UTC m=+155.142230172" lastFinishedPulling="2025-09-30 12:23:52.868320181 +0000 UTC m=+207.118002327" observedRunningTime="2025-09-30 12:23:53.413205698 +0000 UTC m=+207.662887854" watchObservedRunningTime="2025-09-30 12:23:53.415564511 +0000 UTC m=+207.665246667" Sep 30 12:23:53 crc kubenswrapper[5002]: I0930 12:23:53.424997 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-tbphh"] Sep 30 12:23:53 crc kubenswrapper[5002]: I0930 12:23:53.428092 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-tbphh"] Sep 30 12:23:54 crc kubenswrapper[5002]: I0930 12:23:54.684369 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4b09a25e-9b24-4ea4-a5a2-d3155a125d36" path="/var/lib/kubelet/pods/4b09a25e-9b24-4ea4-a5a2-d3155a125d36/volumes" Sep 30 12:23:56 crc kubenswrapper[5002]: I0930 12:23:56.327048 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-ccfzv" Sep 30 12:23:56 crc kubenswrapper[5002]: I0930 12:23:56.327330 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-ccfzv" Sep 30 12:23:56 crc kubenswrapper[5002]: I0930 12:23:56.365187 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-ccfzv" Sep 30 12:23:56 crc kubenswrapper[5002]: I0930 12:23:56.420848 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-ccfzv" Sep 30 12:23:56 crc kubenswrapper[5002]: I0930 12:23:56.526607 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-5w8h8" Sep 30 12:23:56 crc kubenswrapper[5002]: I0930 12:23:56.526690 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-5w8h8" Sep 30 12:23:56 crc kubenswrapper[5002]: I0930 12:23:56.565703 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-5w8h8" Sep 30 12:23:56 crc kubenswrapper[5002]: I0930 12:23:56.934209 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-7bvwm" Sep 30 12:23:56 crc kubenswrapper[5002]: I0930 12:23:56.934622 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-7bvwm" Sep 30 
12:23:56 crc kubenswrapper[5002]: I0930 12:23:56.979833 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-7bvwm" Sep 30 12:23:57 crc kubenswrapper[5002]: I0930 12:23:57.405881 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-7bvwm" Sep 30 12:23:57 crc kubenswrapper[5002]: I0930 12:23:57.408742 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-5w8h8" Sep 30 12:23:58 crc kubenswrapper[5002]: I0930 12:23:58.549387 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-fr4sd" Sep 30 12:23:58 crc kubenswrapper[5002]: I0930 12:23:58.549437 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-fr4sd" Sep 30 12:23:58 crc kubenswrapper[5002]: I0930 12:23:58.595831 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-fr4sd" Sep 30 12:23:58 crc kubenswrapper[5002]: I0930 12:23:58.954330 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-9vlx8" Sep 30 12:23:58 crc kubenswrapper[5002]: I0930 12:23:58.954387 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-9vlx8" Sep 30 12:23:59 crc kubenswrapper[5002]: I0930 12:23:59.008677 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-9vlx8" Sep 30 12:23:59 crc kubenswrapper[5002]: I0930 12:23:59.321674 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-7bvwm"] Sep 30 12:23:59 crc kubenswrapper[5002]: I0930 12:23:59.383249 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-7bvwm" podUID="c2314206-9216-42a7-a942-5be7b30543be" containerName="registry-server" containerID="cri-o://e4b682e03bd32642f952d44a9185cc200f4494cb7ced4ffd87dded1ef1acb6aa" gracePeriod=2 Sep 30 12:23:59 crc kubenswrapper[5002]: I0930 12:23:59.425039 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-9vlx8" Sep 30 12:23:59 crc kubenswrapper[5002]: I0930 12:23:59.425372 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-fr4sd" Sep 30 12:23:59 crc kubenswrapper[5002]: I0930 12:23:59.539031 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-rhw4l" Sep 30 12:23:59 crc kubenswrapper[5002]: I0930 12:23:59.539074 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-rhw4l" Sep 30 12:23:59 crc kubenswrapper[5002]: I0930 12:23:59.573553 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-rhw4l" Sep 30 12:23:59 crc kubenswrapper[5002]: I0930 12:23:59.724559 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-7bvwm" Sep 30 12:23:59 crc kubenswrapper[5002]: I0930 12:23:59.919302 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c2314206-9216-42a7-a942-5be7b30543be-catalog-content\") pod \"c2314206-9216-42a7-a942-5be7b30543be\" (UID: \"c2314206-9216-42a7-a942-5be7b30543be\") " Sep 30 12:23:59 crc kubenswrapper[5002]: I0930 12:23:59.919438 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c2314206-9216-42a7-a942-5be7b30543be-utilities\") pod \"c2314206-9216-42a7-a942-5be7b30543be\" (UID: \"c2314206-9216-42a7-a942-5be7b30543be\") " Sep 30 12:23:59 crc kubenswrapper[5002]: I0930 12:23:59.919544 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rxj5m\" (UniqueName: \"kubernetes.io/projected/c2314206-9216-42a7-a942-5be7b30543be-kube-api-access-rxj5m\") pod \"c2314206-9216-42a7-a942-5be7b30543be\" (UID: \"c2314206-9216-42a7-a942-5be7b30543be\") " Sep 30 12:23:59 crc kubenswrapper[5002]: I0930 12:23:59.920392 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c2314206-9216-42a7-a942-5be7b30543be-utilities" (OuterVolumeSpecName: "utilities") pod "c2314206-9216-42a7-a942-5be7b30543be" (UID: "c2314206-9216-42a7-a942-5be7b30543be"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 12:23:59 crc kubenswrapper[5002]: I0930 12:23:59.928120 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c2314206-9216-42a7-a942-5be7b30543be-kube-api-access-rxj5m" (OuterVolumeSpecName: "kube-api-access-rxj5m") pod "c2314206-9216-42a7-a942-5be7b30543be" (UID: "c2314206-9216-42a7-a942-5be7b30543be"). InnerVolumeSpecName "kube-api-access-rxj5m". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 12:23:59 crc kubenswrapper[5002]: I0930 12:23:59.987400 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c2314206-9216-42a7-a942-5be7b30543be-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "c2314206-9216-42a7-a942-5be7b30543be" (UID: "c2314206-9216-42a7-a942-5be7b30543be"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 12:24:00 crc kubenswrapper[5002]: I0930 12:24:00.020336 5002 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c2314206-9216-42a7-a942-5be7b30543be-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 12:24:00 crc kubenswrapper[5002]: I0930 12:24:00.020364 5002 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c2314206-9216-42a7-a942-5be7b30543be-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 12:24:00 crc kubenswrapper[5002]: I0930 12:24:00.020377 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rxj5m\" (UniqueName: \"kubernetes.io/projected/c2314206-9216-42a7-a942-5be7b30543be-kube-api-access-rxj5m\") on node \"crc\" DevicePath \"\"" Sep 30 12:24:00 crc kubenswrapper[5002]: I0930 12:24:00.391659 5002 generic.go:334] "Generic (PLEG): container finished" podID="c2314206-9216-42a7-a942-5be7b30543be" containerID="e4b682e03bd32642f952d44a9185cc200f4494cb7ced4ffd87dded1ef1acb6aa" exitCode=0 Sep 30 12:24:00 crc kubenswrapper[5002]: I0930 12:24:00.391798 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-7bvwm" Sep 30 12:24:00 crc kubenswrapper[5002]: I0930 12:24:00.391892 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7bvwm" event={"ID":"c2314206-9216-42a7-a942-5be7b30543be","Type":"ContainerDied","Data":"e4b682e03bd32642f952d44a9185cc200f4494cb7ced4ffd87dded1ef1acb6aa"} Sep 30 12:24:00 crc kubenswrapper[5002]: I0930 12:24:00.391939 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7bvwm" event={"ID":"c2314206-9216-42a7-a942-5be7b30543be","Type":"ContainerDied","Data":"e591b6bf63258756e3af2145f0af464bbc2311224ee04e9c03174a4228a2b262"} Sep 30 12:24:00 crc kubenswrapper[5002]: I0930 12:24:00.391969 5002 scope.go:117] "RemoveContainer" containerID="e4b682e03bd32642f952d44a9185cc200f4494cb7ced4ffd87dded1ef1acb6aa" Sep 30 12:24:00 crc kubenswrapper[5002]: I0930 12:24:00.406582 5002 scope.go:117] "RemoveContainer" containerID="80002f93f344ea44b033c0523331ba9ca7937ed19573f388abe3db5c12994984" Sep 30 12:24:00 crc kubenswrapper[5002]: I0930 12:24:00.423658 5002 scope.go:117] "RemoveContainer" containerID="352909dadc59b2f01c68acee6a9df4d173191de2b99d0d3f9606300c6d025af0" Sep 30 12:24:00 crc kubenswrapper[5002]: I0930 12:24:00.424912 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-7bvwm"] Sep 30 12:24:00 crc kubenswrapper[5002]: I0930 12:24:00.428629 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-7bvwm"] Sep 30 12:24:00 crc kubenswrapper[5002]: I0930 12:24:00.436490 5002 scope.go:117] "RemoveContainer" containerID="e4b682e03bd32642f952d44a9185cc200f4494cb7ced4ffd87dded1ef1acb6aa" Sep 30 12:24:00 crc kubenswrapper[5002]: E0930 12:24:00.437568 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e4b682e03bd32642f952d44a9185cc200f4494cb7ced4ffd87dded1ef1acb6aa\": container with ID starting with e4b682e03bd32642f952d44a9185cc200f4494cb7ced4ffd87dded1ef1acb6aa not found: ID does not exist" containerID="e4b682e03bd32642f952d44a9185cc200f4494cb7ced4ffd87dded1ef1acb6aa" Sep 30 12:24:00 crc kubenswrapper[5002]: I0930 12:24:00.437619 
5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e4b682e03bd32642f952d44a9185cc200f4494cb7ced4ffd87dded1ef1acb6aa"} err="failed to get container status \"e4b682e03bd32642f952d44a9185cc200f4494cb7ced4ffd87dded1ef1acb6aa\": rpc error: code = NotFound desc = could not find container \"e4b682e03bd32642f952d44a9185cc200f4494cb7ced4ffd87dded1ef1acb6aa\": container with ID starting with e4b682e03bd32642f952d44a9185cc200f4494cb7ced4ffd87dded1ef1acb6aa not found: ID does not exist" Sep 30 12:24:00 crc kubenswrapper[5002]: I0930 12:24:00.437671 5002 scope.go:117] "RemoveContainer" containerID="80002f93f344ea44b033c0523331ba9ca7937ed19573f388abe3db5c12994984" Sep 30 12:24:00 crc kubenswrapper[5002]: E0930 12:24:00.438119 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"80002f93f344ea44b033c0523331ba9ca7937ed19573f388abe3db5c12994984\": container with ID starting with 80002f93f344ea44b033c0523331ba9ca7937ed19573f388abe3db5c12994984 not found: ID does not exist" containerID="80002f93f344ea44b033c0523331ba9ca7937ed19573f388abe3db5c12994984" Sep 30 12:24:00 crc kubenswrapper[5002]: I0930 12:24:00.438153 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"80002f93f344ea44b033c0523331ba9ca7937ed19573f388abe3db5c12994984"} err="failed to get container status \"80002f93f344ea44b033c0523331ba9ca7937ed19573f388abe3db5c12994984\": rpc error: code = NotFound desc = could not find container \"80002f93f344ea44b033c0523331ba9ca7937ed19573f388abe3db5c12994984\": container with ID starting with 80002f93f344ea44b033c0523331ba9ca7937ed19573f388abe3db5c12994984 not found: ID does not exist" Sep 30 12:24:00 crc kubenswrapper[5002]: I0930 12:24:00.438174 5002 scope.go:117] "RemoveContainer" containerID="352909dadc59b2f01c68acee6a9df4d173191de2b99d0d3f9606300c6d025af0" Sep 30 12:24:00 crc kubenswrapper[5002]: E0930 12:24:00.438363 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"352909dadc59b2f01c68acee6a9df4d173191de2b99d0d3f9606300c6d025af0\": container with ID starting with 352909dadc59b2f01c68acee6a9df4d173191de2b99d0d3f9606300c6d025af0 not found: ID does not exist" containerID="352909dadc59b2f01c68acee6a9df4d173191de2b99d0d3f9606300c6d025af0" Sep 30 12:24:00 crc kubenswrapper[5002]: I0930 12:24:00.438396 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"352909dadc59b2f01c68acee6a9df4d173191de2b99d0d3f9606300c6d025af0"} err="failed to get container status \"352909dadc59b2f01c68acee6a9df4d173191de2b99d0d3f9606300c6d025af0\": rpc error: code = NotFound desc = could not find container \"352909dadc59b2f01c68acee6a9df4d173191de2b99d0d3f9606300c6d025af0\": container with ID starting with 352909dadc59b2f01c68acee6a9df4d173191de2b99d0d3f9606300c6d025af0 not found: ID does not exist" Sep 30 12:24:00 crc kubenswrapper[5002]: I0930 12:24:00.456941 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-rhw4l" Sep 30 12:24:00 crc kubenswrapper[5002]: I0930 12:24:00.683686 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c2314206-9216-42a7-a942-5be7b30543be" path="/var/lib/kubelet/pods/c2314206-9216-42a7-a942-5be7b30543be/volumes" Sep 30 12:24:01 crc kubenswrapper[5002]: I0930 12:24:01.724665 5002 kubelet.go:2437] "SyncLoop DELETE" 
source="api" pods=["openshift-marketplace/redhat-marketplace-9vlx8"] Sep 30 12:24:01 crc kubenswrapper[5002]: I0930 12:24:01.725155 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-9vlx8" podUID="e5ef4b78-d57a-4e71-b275-ac5a2ca7ab0b" containerName="registry-server" containerID="cri-o://0dd90e115e7c0af7f2b7242c1b9d45a7b65b608d6ecb686eb1fad2a7b6d95013" gracePeriod=2 Sep 30 12:24:02 crc kubenswrapper[5002]: I0930 12:24:02.098054 5002 patch_prober.go:28] interesting pod/machine-config-daemon-ncbb5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 12:24:02 crc kubenswrapper[5002]: I0930 12:24:02.098109 5002 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" podUID="341a55c6-78d3-4fa2-8f47-b56fd41fa1c1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 12:24:02 crc kubenswrapper[5002]: I0930 12:24:02.098149 5002 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" Sep 30 12:24:02 crc kubenswrapper[5002]: I0930 12:24:02.098645 5002 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"6868404a7219f8dbbc4e7541e5f80402c169808daea81b5f94546472cb8e70a6"} pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 30 12:24:02 crc kubenswrapper[5002]: I0930 12:24:02.098697 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" podUID="341a55c6-78d3-4fa2-8f47-b56fd41fa1c1" containerName="machine-config-daemon" containerID="cri-o://6868404a7219f8dbbc4e7541e5f80402c169808daea81b5f94546472cb8e70a6" gracePeriod=600 Sep 30 12:24:02 crc kubenswrapper[5002]: I0930 12:24:02.102083 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-9vlx8" Sep 30 12:24:02 crc kubenswrapper[5002]: I0930 12:24:02.145454 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e5ef4b78-d57a-4e71-b275-ac5a2ca7ab0b-catalog-content\") pod \"e5ef4b78-d57a-4e71-b275-ac5a2ca7ab0b\" (UID: \"e5ef4b78-d57a-4e71-b275-ac5a2ca7ab0b\") " Sep 30 12:24:02 crc kubenswrapper[5002]: I0930 12:24:02.145532 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jkqtp\" (UniqueName: \"kubernetes.io/projected/e5ef4b78-d57a-4e71-b275-ac5a2ca7ab0b-kube-api-access-jkqtp\") pod \"e5ef4b78-d57a-4e71-b275-ac5a2ca7ab0b\" (UID: \"e5ef4b78-d57a-4e71-b275-ac5a2ca7ab0b\") " Sep 30 12:24:02 crc kubenswrapper[5002]: I0930 12:24:02.145573 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e5ef4b78-d57a-4e71-b275-ac5a2ca7ab0b-utilities\") pod \"e5ef4b78-d57a-4e71-b275-ac5a2ca7ab0b\" (UID: \"e5ef4b78-d57a-4e71-b275-ac5a2ca7ab0b\") " Sep 30 12:24:02 crc kubenswrapper[5002]: I0930 12:24:02.146582 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e5ef4b78-d57a-4e71-b275-ac5a2ca7ab0b-utilities" (OuterVolumeSpecName: "utilities") pod "e5ef4b78-d57a-4e71-b275-ac5a2ca7ab0b" (UID: "e5ef4b78-d57a-4e71-b275-ac5a2ca7ab0b"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 12:24:02 crc kubenswrapper[5002]: I0930 12:24:02.150624 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e5ef4b78-d57a-4e71-b275-ac5a2ca7ab0b-kube-api-access-jkqtp" (OuterVolumeSpecName: "kube-api-access-jkqtp") pod "e5ef4b78-d57a-4e71-b275-ac5a2ca7ab0b" (UID: "e5ef4b78-d57a-4e71-b275-ac5a2ca7ab0b"). InnerVolumeSpecName "kube-api-access-jkqtp". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 12:24:02 crc kubenswrapper[5002]: I0930 12:24:02.157713 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e5ef4b78-d57a-4e71-b275-ac5a2ca7ab0b-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "e5ef4b78-d57a-4e71-b275-ac5a2ca7ab0b" (UID: "e5ef4b78-d57a-4e71-b275-ac5a2ca7ab0b"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 12:24:02 crc kubenswrapper[5002]: I0930 12:24:02.246439 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jkqtp\" (UniqueName: \"kubernetes.io/projected/e5ef4b78-d57a-4e71-b275-ac5a2ca7ab0b-kube-api-access-jkqtp\") on node \"crc\" DevicePath \"\"" Sep 30 12:24:02 crc kubenswrapper[5002]: I0930 12:24:02.246490 5002 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e5ef4b78-d57a-4e71-b275-ac5a2ca7ab0b-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 12:24:02 crc kubenswrapper[5002]: I0930 12:24:02.246504 5002 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e5ef4b78-d57a-4e71-b275-ac5a2ca7ab0b-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 12:24:02 crc kubenswrapper[5002]: I0930 12:24:02.405367 5002 generic.go:334] "Generic (PLEG): container finished" podID="341a55c6-78d3-4fa2-8f47-b56fd41fa1c1" containerID="6868404a7219f8dbbc4e7541e5f80402c169808daea81b5f94546472cb8e70a6" exitCode=0 Sep 30 12:24:02 crc kubenswrapper[5002]: I0930 12:24:02.405583 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" event={"ID":"341a55c6-78d3-4fa2-8f47-b56fd41fa1c1","Type":"ContainerDied","Data":"6868404a7219f8dbbc4e7541e5f80402c169808daea81b5f94546472cb8e70a6"} Sep 30 12:24:02 crc kubenswrapper[5002]: I0930 12:24:02.405812 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" event={"ID":"341a55c6-78d3-4fa2-8f47-b56fd41fa1c1","Type":"ContainerStarted","Data":"a6a0c92d3cdc4d9bfc4039ca649c102e891457b2cdc698fa8e2c73771783ffd2"} Sep 30 12:24:02 crc kubenswrapper[5002]: I0930 12:24:02.409303 5002 generic.go:334] "Generic (PLEG): container finished" podID="e5ef4b78-d57a-4e71-b275-ac5a2ca7ab0b" containerID="0dd90e115e7c0af7f2b7242c1b9d45a7b65b608d6ecb686eb1fad2a7b6d95013" exitCode=0 Sep 30 12:24:02 crc kubenswrapper[5002]: I0930 12:24:02.409343 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-9vlx8" event={"ID":"e5ef4b78-d57a-4e71-b275-ac5a2ca7ab0b","Type":"ContainerDied","Data":"0dd90e115e7c0af7f2b7242c1b9d45a7b65b608d6ecb686eb1fad2a7b6d95013"} Sep 30 12:24:02 crc kubenswrapper[5002]: I0930 12:24:02.409365 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-9vlx8" event={"ID":"e5ef4b78-d57a-4e71-b275-ac5a2ca7ab0b","Type":"ContainerDied","Data":"8e9c540b9c45b46965d830a68d013366f502af50c4789ceabf951b68b9d364ed"} Sep 30 12:24:02 crc kubenswrapper[5002]: I0930 12:24:02.409384 5002 scope.go:117] "RemoveContainer" containerID="0dd90e115e7c0af7f2b7242c1b9d45a7b65b608d6ecb686eb1fad2a7b6d95013" Sep 30 12:24:02 crc kubenswrapper[5002]: I0930 12:24:02.409388 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-9vlx8" Sep 30 12:24:02 crc kubenswrapper[5002]: I0930 12:24:02.424120 5002 scope.go:117] "RemoveContainer" containerID="b63020eede55c46ec02342f4dd863fe7e29d2033fe8b9f29110aae6e6e83b4aa" Sep 30 12:24:02 crc kubenswrapper[5002]: I0930 12:24:02.440350 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-9vlx8"] Sep 30 12:24:02 crc kubenswrapper[5002]: I0930 12:24:02.443117 5002 scope.go:117] "RemoveContainer" containerID="250676fd4c16e59bbef337958ae60887d687d64b006ae37a25aed514bd5c9b15" Sep 30 12:24:02 crc kubenswrapper[5002]: I0930 12:24:02.443601 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-9vlx8"] Sep 30 12:24:02 crc kubenswrapper[5002]: I0930 12:24:02.460029 5002 scope.go:117] "RemoveContainer" containerID="0dd90e115e7c0af7f2b7242c1b9d45a7b65b608d6ecb686eb1fad2a7b6d95013" Sep 30 12:24:02 crc kubenswrapper[5002]: E0930 12:24:02.460428 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0dd90e115e7c0af7f2b7242c1b9d45a7b65b608d6ecb686eb1fad2a7b6d95013\": container with ID starting with 0dd90e115e7c0af7f2b7242c1b9d45a7b65b608d6ecb686eb1fad2a7b6d95013 not found: ID does not exist" containerID="0dd90e115e7c0af7f2b7242c1b9d45a7b65b608d6ecb686eb1fad2a7b6d95013" Sep 30 12:24:02 crc kubenswrapper[5002]: I0930 12:24:02.460491 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0dd90e115e7c0af7f2b7242c1b9d45a7b65b608d6ecb686eb1fad2a7b6d95013"} err="failed to get container status \"0dd90e115e7c0af7f2b7242c1b9d45a7b65b608d6ecb686eb1fad2a7b6d95013\": rpc error: code = NotFound desc = could not find container \"0dd90e115e7c0af7f2b7242c1b9d45a7b65b608d6ecb686eb1fad2a7b6d95013\": container with ID starting with 0dd90e115e7c0af7f2b7242c1b9d45a7b65b608d6ecb686eb1fad2a7b6d95013 not found: ID does not exist" Sep 30 12:24:02 crc kubenswrapper[5002]: I0930 12:24:02.460522 5002 scope.go:117] "RemoveContainer" containerID="b63020eede55c46ec02342f4dd863fe7e29d2033fe8b9f29110aae6e6e83b4aa" Sep 30 12:24:02 crc kubenswrapper[5002]: E0930 12:24:02.460828 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b63020eede55c46ec02342f4dd863fe7e29d2033fe8b9f29110aae6e6e83b4aa\": container with ID starting with b63020eede55c46ec02342f4dd863fe7e29d2033fe8b9f29110aae6e6e83b4aa not found: ID does not exist" containerID="b63020eede55c46ec02342f4dd863fe7e29d2033fe8b9f29110aae6e6e83b4aa" Sep 30 12:24:02 crc kubenswrapper[5002]: I0930 12:24:02.460866 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b63020eede55c46ec02342f4dd863fe7e29d2033fe8b9f29110aae6e6e83b4aa"} err="failed to get container status \"b63020eede55c46ec02342f4dd863fe7e29d2033fe8b9f29110aae6e6e83b4aa\": rpc error: code = NotFound desc = could not find container \"b63020eede55c46ec02342f4dd863fe7e29d2033fe8b9f29110aae6e6e83b4aa\": container with ID starting with b63020eede55c46ec02342f4dd863fe7e29d2033fe8b9f29110aae6e6e83b4aa not found: ID does not exist" Sep 30 12:24:02 crc kubenswrapper[5002]: I0930 12:24:02.460892 5002 scope.go:117] "RemoveContainer" containerID="250676fd4c16e59bbef337958ae60887d687d64b006ae37a25aed514bd5c9b15" Sep 30 12:24:02 crc kubenswrapper[5002]: E0930 12:24:02.461183 5002 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"250676fd4c16e59bbef337958ae60887d687d64b006ae37a25aed514bd5c9b15\": container with ID starting with 250676fd4c16e59bbef337958ae60887d687d64b006ae37a25aed514bd5c9b15 not found: ID does not exist" containerID="250676fd4c16e59bbef337958ae60887d687d64b006ae37a25aed514bd5c9b15" Sep 30 12:24:02 crc kubenswrapper[5002]: I0930 12:24:02.461220 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"250676fd4c16e59bbef337958ae60887d687d64b006ae37a25aed514bd5c9b15"} err="failed to get container status \"250676fd4c16e59bbef337958ae60887d687d64b006ae37a25aed514bd5c9b15\": rpc error: code = NotFound desc = could not find container \"250676fd4c16e59bbef337958ae60887d687d64b006ae37a25aed514bd5c9b15\": container with ID starting with 250676fd4c16e59bbef337958ae60887d687d64b006ae37a25aed514bd5c9b15 not found: ID does not exist" Sep 30 12:24:02 crc kubenswrapper[5002]: I0930 12:24:02.682909 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e5ef4b78-d57a-4e71-b275-ac5a2ca7ab0b" path="/var/lib/kubelet/pods/e5ef4b78-d57a-4e71-b275-ac5a2ca7ab0b/volumes" Sep 30 12:24:07 crc kubenswrapper[5002]: I0930 12:24:07.859050 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-wk84c"] Sep 30 12:24:33 crc kubenswrapper[5002]: I0930 12:24:33.460188 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-authentication/oauth-openshift-558db77b4-wk84c" podUID="87454cc1-5175-495b-9551-a19474d51e4a" containerName="oauth-openshift" containerID="cri-o://f0ef6708f1f414ffd0dfc09c2763631d0ef154c8d6a5db080fed97bc7ee6ea04" gracePeriod=15 Sep 30 12:24:33 crc kubenswrapper[5002]: I0930 12:24:33.870033 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-wk84c" Sep 30 12:24:33 crc kubenswrapper[5002]: I0930 12:24:33.903088 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-575cc5b957-7nnvr"] Sep 30 12:24:33 crc kubenswrapper[5002]: E0930 12:24:33.904737 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c2314206-9216-42a7-a942-5be7b30543be" containerName="extract-utilities" Sep 30 12:24:33 crc kubenswrapper[5002]: I0930 12:24:33.904756 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="c2314206-9216-42a7-a942-5be7b30543be" containerName="extract-utilities" Sep 30 12:24:33 crc kubenswrapper[5002]: E0930 12:24:33.904765 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="78cdb7b1-d7cc-4c42-b2c0-7cd3e4f6c559" containerName="extract-utilities" Sep 30 12:24:33 crc kubenswrapper[5002]: I0930 12:24:33.904771 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="78cdb7b1-d7cc-4c42-b2c0-7cd3e4f6c559" containerName="extract-utilities" Sep 30 12:24:33 crc kubenswrapper[5002]: E0930 12:24:33.904784 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="78cdb7b1-d7cc-4c42-b2c0-7cd3e4f6c559" containerName="registry-server" Sep 30 12:24:33 crc kubenswrapper[5002]: I0930 12:24:33.904791 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="78cdb7b1-d7cc-4c42-b2c0-7cd3e4f6c559" containerName="registry-server" Sep 30 12:24:33 crc kubenswrapper[5002]: E0930 12:24:33.904798 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="35381678-7a1b-4eab-9e1a-64b75b8c0eba" containerName="pruner" Sep 30 12:24:33 crc kubenswrapper[5002]: I0930 12:24:33.904805 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="35381678-7a1b-4eab-9e1a-64b75b8c0eba" containerName="pruner" Sep 30 12:24:33 crc kubenswrapper[5002]: E0930 12:24:33.904814 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e5ef4b78-d57a-4e71-b275-ac5a2ca7ab0b" containerName="extract-utilities" Sep 30 12:24:33 crc kubenswrapper[5002]: I0930 12:24:33.904819 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="e5ef4b78-d57a-4e71-b275-ac5a2ca7ab0b" containerName="extract-utilities" Sep 30 12:24:33 crc kubenswrapper[5002]: E0930 12:24:33.904827 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c2314206-9216-42a7-a942-5be7b30543be" containerName="registry-server" Sep 30 12:24:33 crc kubenswrapper[5002]: I0930 12:24:33.904832 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="c2314206-9216-42a7-a942-5be7b30543be" containerName="registry-server" Sep 30 12:24:33 crc kubenswrapper[5002]: E0930 12:24:33.904840 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4b09a25e-9b24-4ea4-a5a2-d3155a125d36" containerName="extract-content" Sep 30 12:24:33 crc kubenswrapper[5002]: I0930 12:24:33.904846 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="4b09a25e-9b24-4ea4-a5a2-d3155a125d36" containerName="extract-content" Sep 30 12:24:33 crc kubenswrapper[5002]: E0930 12:24:33.904980 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4b09a25e-9b24-4ea4-a5a2-d3155a125d36" containerName="registry-server" Sep 30 12:24:33 crc kubenswrapper[5002]: I0930 12:24:33.904990 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="4b09a25e-9b24-4ea4-a5a2-d3155a125d36" containerName="registry-server" Sep 30 12:24:33 crc kubenswrapper[5002]: E0930 12:24:33.905000 5002 cpu_manager.go:410] "RemoveStaleState: removing 
container" podUID="e5ef4b78-d57a-4e71-b275-ac5a2ca7ab0b" containerName="extract-content" Sep 30 12:24:33 crc kubenswrapper[5002]: I0930 12:24:33.905006 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="e5ef4b78-d57a-4e71-b275-ac5a2ca7ab0b" containerName="extract-content" Sep 30 12:24:33 crc kubenswrapper[5002]: E0930 12:24:33.905038 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c2314206-9216-42a7-a942-5be7b30543be" containerName="extract-content" Sep 30 12:24:33 crc kubenswrapper[5002]: I0930 12:24:33.905046 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="c2314206-9216-42a7-a942-5be7b30543be" containerName="extract-content" Sep 30 12:24:33 crc kubenswrapper[5002]: E0930 12:24:33.905055 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="87454cc1-5175-495b-9551-a19474d51e4a" containerName="oauth-openshift" Sep 30 12:24:33 crc kubenswrapper[5002]: I0930 12:24:33.905060 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="87454cc1-5175-495b-9551-a19474d51e4a" containerName="oauth-openshift" Sep 30 12:24:33 crc kubenswrapper[5002]: E0930 12:24:33.905068 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4b09a25e-9b24-4ea4-a5a2-d3155a125d36" containerName="extract-utilities" Sep 30 12:24:33 crc kubenswrapper[5002]: I0930 12:24:33.905074 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="4b09a25e-9b24-4ea4-a5a2-d3155a125d36" containerName="extract-utilities" Sep 30 12:24:33 crc kubenswrapper[5002]: E0930 12:24:33.905081 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e5ef4b78-d57a-4e71-b275-ac5a2ca7ab0b" containerName="registry-server" Sep 30 12:24:33 crc kubenswrapper[5002]: I0930 12:24:33.905087 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="e5ef4b78-d57a-4e71-b275-ac5a2ca7ab0b" containerName="registry-server" Sep 30 12:24:33 crc kubenswrapper[5002]: E0930 12:24:33.905095 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="78cdb7b1-d7cc-4c42-b2c0-7cd3e4f6c559" containerName="extract-content" Sep 30 12:24:33 crc kubenswrapper[5002]: I0930 12:24:33.905101 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="78cdb7b1-d7cc-4c42-b2c0-7cd3e4f6c559" containerName="extract-content" Sep 30 12:24:33 crc kubenswrapper[5002]: I0930 12:24:33.905199 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="e5ef4b78-d57a-4e71-b275-ac5a2ca7ab0b" containerName="registry-server" Sep 30 12:24:33 crc kubenswrapper[5002]: I0930 12:24:33.905210 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="c2314206-9216-42a7-a942-5be7b30543be" containerName="registry-server" Sep 30 12:24:33 crc kubenswrapper[5002]: I0930 12:24:33.905224 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="4b09a25e-9b24-4ea4-a5a2-d3155a125d36" containerName="registry-server" Sep 30 12:24:33 crc kubenswrapper[5002]: I0930 12:24:33.905233 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="87454cc1-5175-495b-9551-a19474d51e4a" containerName="oauth-openshift" Sep 30 12:24:33 crc kubenswrapper[5002]: I0930 12:24:33.905243 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="35381678-7a1b-4eab-9e1a-64b75b8c0eba" containerName="pruner" Sep 30 12:24:33 crc kubenswrapper[5002]: I0930 12:24:33.905249 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="78cdb7b1-d7cc-4c42-b2c0-7cd3e4f6c559" containerName="registry-server" Sep 30 12:24:33 crc kubenswrapper[5002]: I0930 12:24:33.905642 5002 
util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-575cc5b957-7nnvr" Sep 30 12:24:33 crc kubenswrapper[5002]: I0930 12:24:33.908239 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-575cc5b957-7nnvr"] Sep 30 12:24:33 crc kubenswrapper[5002]: I0930 12:24:33.967022 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xtlqs\" (UniqueName: \"kubernetes.io/projected/87454cc1-5175-495b-9551-a19474d51e4a-kube-api-access-xtlqs\") pod \"87454cc1-5175-495b-9551-a19474d51e4a\" (UID: \"87454cc1-5175-495b-9551-a19474d51e4a\") " Sep 30 12:24:33 crc kubenswrapper[5002]: I0930 12:24:33.967131 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/87454cc1-5175-495b-9551-a19474d51e4a-v4-0-config-system-service-ca\") pod \"87454cc1-5175-495b-9551-a19474d51e4a\" (UID: \"87454cc1-5175-495b-9551-a19474d51e4a\") " Sep 30 12:24:33 crc kubenswrapper[5002]: I0930 12:24:33.967157 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/87454cc1-5175-495b-9551-a19474d51e4a-v4-0-config-user-template-login\") pod \"87454cc1-5175-495b-9551-a19474d51e4a\" (UID: \"87454cc1-5175-495b-9551-a19474d51e4a\") " Sep 30 12:24:33 crc kubenswrapper[5002]: I0930 12:24:33.967219 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/87454cc1-5175-495b-9551-a19474d51e4a-v4-0-config-system-router-certs\") pod \"87454cc1-5175-495b-9551-a19474d51e4a\" (UID: \"87454cc1-5175-495b-9551-a19474d51e4a\") " Sep 30 12:24:33 crc kubenswrapper[5002]: I0930 12:24:33.967249 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/87454cc1-5175-495b-9551-a19474d51e4a-audit-policies\") pod \"87454cc1-5175-495b-9551-a19474d51e4a\" (UID: \"87454cc1-5175-495b-9551-a19474d51e4a\") " Sep 30 12:24:33 crc kubenswrapper[5002]: I0930 12:24:33.967453 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/095e48d0-c414-4007-af17-1e82ee8ac6b7-v4-0-config-system-service-ca\") pod \"oauth-openshift-575cc5b957-7nnvr\" (UID: \"095e48d0-c414-4007-af17-1e82ee8ac6b7\") " pod="openshift-authentication/oauth-openshift-575cc5b957-7nnvr" Sep 30 12:24:33 crc kubenswrapper[5002]: I0930 12:24:33.967510 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/095e48d0-c414-4007-af17-1e82ee8ac6b7-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-575cc5b957-7nnvr\" (UID: \"095e48d0-c414-4007-af17-1e82ee8ac6b7\") " pod="openshift-authentication/oauth-openshift-575cc5b957-7nnvr" Sep 30 12:24:33 crc kubenswrapper[5002]: I0930 12:24:33.967534 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/095e48d0-c414-4007-af17-1e82ee8ac6b7-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-575cc5b957-7nnvr\" (UID: \"095e48d0-c414-4007-af17-1e82ee8ac6b7\") " 
pod="openshift-authentication/oauth-openshift-575cc5b957-7nnvr" Sep 30 12:24:33 crc kubenswrapper[5002]: I0930 12:24:33.967575 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/095e48d0-c414-4007-af17-1e82ee8ac6b7-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-575cc5b957-7nnvr\" (UID: \"095e48d0-c414-4007-af17-1e82ee8ac6b7\") " pod="openshift-authentication/oauth-openshift-575cc5b957-7nnvr" Sep 30 12:24:33 crc kubenswrapper[5002]: I0930 12:24:33.967612 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/095e48d0-c414-4007-af17-1e82ee8ac6b7-audit-policies\") pod \"oauth-openshift-575cc5b957-7nnvr\" (UID: \"095e48d0-c414-4007-af17-1e82ee8ac6b7\") " pod="openshift-authentication/oauth-openshift-575cc5b957-7nnvr" Sep 30 12:24:33 crc kubenswrapper[5002]: I0930 12:24:33.967656 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/095e48d0-c414-4007-af17-1e82ee8ac6b7-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-575cc5b957-7nnvr\" (UID: \"095e48d0-c414-4007-af17-1e82ee8ac6b7\") " pod="openshift-authentication/oauth-openshift-575cc5b957-7nnvr" Sep 30 12:24:33 crc kubenswrapper[5002]: I0930 12:24:33.967687 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/095e48d0-c414-4007-af17-1e82ee8ac6b7-v4-0-config-system-router-certs\") pod \"oauth-openshift-575cc5b957-7nnvr\" (UID: \"095e48d0-c414-4007-af17-1e82ee8ac6b7\") " pod="openshift-authentication/oauth-openshift-575cc5b957-7nnvr" Sep 30 12:24:33 crc kubenswrapper[5002]: I0930 12:24:33.968410 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/095e48d0-c414-4007-af17-1e82ee8ac6b7-v4-0-config-system-serving-cert\") pod \"oauth-openshift-575cc5b957-7nnvr\" (UID: \"095e48d0-c414-4007-af17-1e82ee8ac6b7\") " pod="openshift-authentication/oauth-openshift-575cc5b957-7nnvr" Sep 30 12:24:33 crc kubenswrapper[5002]: I0930 12:24:33.968629 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/095e48d0-c414-4007-af17-1e82ee8ac6b7-audit-dir\") pod \"oauth-openshift-575cc5b957-7nnvr\" (UID: \"095e48d0-c414-4007-af17-1e82ee8ac6b7\") " pod="openshift-authentication/oauth-openshift-575cc5b957-7nnvr" Sep 30 12:24:33 crc kubenswrapper[5002]: I0930 12:24:33.968836 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/095e48d0-c414-4007-af17-1e82ee8ac6b7-v4-0-config-user-template-error\") pod \"oauth-openshift-575cc5b957-7nnvr\" (UID: \"095e48d0-c414-4007-af17-1e82ee8ac6b7\") " pod="openshift-authentication/oauth-openshift-575cc5b957-7nnvr" Sep 30 12:24:33 crc kubenswrapper[5002]: I0930 12:24:33.968996 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: 
\"kubernetes.io/secret/095e48d0-c414-4007-af17-1e82ee8ac6b7-v4-0-config-user-template-login\") pod \"oauth-openshift-575cc5b957-7nnvr\" (UID: \"095e48d0-c414-4007-af17-1e82ee8ac6b7\") " pod="openshift-authentication/oauth-openshift-575cc5b957-7nnvr" Sep 30 12:24:33 crc kubenswrapper[5002]: I0930 12:24:33.969157 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dltsh\" (UniqueName: \"kubernetes.io/projected/095e48d0-c414-4007-af17-1e82ee8ac6b7-kube-api-access-dltsh\") pod \"oauth-openshift-575cc5b957-7nnvr\" (UID: \"095e48d0-c414-4007-af17-1e82ee8ac6b7\") " pod="openshift-authentication/oauth-openshift-575cc5b957-7nnvr" Sep 30 12:24:33 crc kubenswrapper[5002]: I0930 12:24:33.969283 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/095e48d0-c414-4007-af17-1e82ee8ac6b7-v4-0-config-system-cliconfig\") pod \"oauth-openshift-575cc5b957-7nnvr\" (UID: \"095e48d0-c414-4007-af17-1e82ee8ac6b7\") " pod="openshift-authentication/oauth-openshift-575cc5b957-7nnvr" Sep 30 12:24:33 crc kubenswrapper[5002]: I0930 12:24:33.969428 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/095e48d0-c414-4007-af17-1e82ee8ac6b7-v4-0-config-system-session\") pod \"oauth-openshift-575cc5b957-7nnvr\" (UID: \"095e48d0-c414-4007-af17-1e82ee8ac6b7\") " pod="openshift-authentication/oauth-openshift-575cc5b957-7nnvr" Sep 30 12:24:33 crc kubenswrapper[5002]: I0930 12:24:33.968492 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/87454cc1-5175-495b-9551-a19474d51e4a-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "87454cc1-5175-495b-9551-a19474d51e4a" (UID: "87454cc1-5175-495b-9551-a19474d51e4a"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 12:24:33 crc kubenswrapper[5002]: I0930 12:24:33.968520 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/87454cc1-5175-495b-9551-a19474d51e4a-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "87454cc1-5175-495b-9551-a19474d51e4a" (UID: "87454cc1-5175-495b-9551-a19474d51e4a"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 12:24:33 crc kubenswrapper[5002]: I0930 12:24:33.973747 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/87454cc1-5175-495b-9551-a19474d51e4a-kube-api-access-xtlqs" (OuterVolumeSpecName: "kube-api-access-xtlqs") pod "87454cc1-5175-495b-9551-a19474d51e4a" (UID: "87454cc1-5175-495b-9551-a19474d51e4a"). InnerVolumeSpecName "kube-api-access-xtlqs". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 12:24:33 crc kubenswrapper[5002]: I0930 12:24:33.973771 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/87454cc1-5175-495b-9551-a19474d51e4a-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "87454cc1-5175-495b-9551-a19474d51e4a" (UID: "87454cc1-5175-495b-9551-a19474d51e4a"). InnerVolumeSpecName "v4-0-config-user-template-login". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:24:33 crc kubenswrapper[5002]: I0930 12:24:33.975727 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/87454cc1-5175-495b-9551-a19474d51e4a-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "87454cc1-5175-495b-9551-a19474d51e4a" (UID: "87454cc1-5175-495b-9551-a19474d51e4a"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:24:34 crc kubenswrapper[5002]: I0930 12:24:34.070669 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/87454cc1-5175-495b-9551-a19474d51e4a-v4-0-config-system-session\") pod \"87454cc1-5175-495b-9551-a19474d51e4a\" (UID: \"87454cc1-5175-495b-9551-a19474d51e4a\") " Sep 30 12:24:34 crc kubenswrapper[5002]: I0930 12:24:34.070758 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/87454cc1-5175-495b-9551-a19474d51e4a-audit-dir\") pod \"87454cc1-5175-495b-9551-a19474d51e4a\" (UID: \"87454cc1-5175-495b-9551-a19474d51e4a\") " Sep 30 12:24:34 crc kubenswrapper[5002]: I0930 12:24:34.070951 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/87454cc1-5175-495b-9551-a19474d51e4a-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "87454cc1-5175-495b-9551-a19474d51e4a" (UID: "87454cc1-5175-495b-9551-a19474d51e4a"). InnerVolumeSpecName "audit-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 12:24:34 crc kubenswrapper[5002]: I0930 12:24:34.071333 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/87454cc1-5175-495b-9551-a19474d51e4a-v4-0-config-system-trusted-ca-bundle\") pod \"87454cc1-5175-495b-9551-a19474d51e4a\" (UID: \"87454cc1-5175-495b-9551-a19474d51e4a\") " Sep 30 12:24:34 crc kubenswrapper[5002]: I0930 12:24:34.071404 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/87454cc1-5175-495b-9551-a19474d51e4a-v4-0-config-user-idp-0-file-data\") pod \"87454cc1-5175-495b-9551-a19474d51e4a\" (UID: \"87454cc1-5175-495b-9551-a19474d51e4a\") " Sep 30 12:24:34 crc kubenswrapper[5002]: I0930 12:24:34.071455 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/87454cc1-5175-495b-9551-a19474d51e4a-v4-0-config-system-cliconfig\") pod \"87454cc1-5175-495b-9551-a19474d51e4a\" (UID: \"87454cc1-5175-495b-9551-a19474d51e4a\") " Sep 30 12:24:34 crc kubenswrapper[5002]: I0930 12:24:34.071571 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/87454cc1-5175-495b-9551-a19474d51e4a-v4-0-config-system-serving-cert\") pod \"87454cc1-5175-495b-9551-a19474d51e4a\" (UID: \"87454cc1-5175-495b-9551-a19474d51e4a\") " Sep 30 12:24:34 crc kubenswrapper[5002]: I0930 12:24:34.071688 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/87454cc1-5175-495b-9551-a19474d51e4a-v4-0-config-system-ocp-branding-template\") pod 
\"87454cc1-5175-495b-9551-a19474d51e4a\" (UID: \"87454cc1-5175-495b-9551-a19474d51e4a\") " Sep 30 12:24:34 crc kubenswrapper[5002]: I0930 12:24:34.071751 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/87454cc1-5175-495b-9551-a19474d51e4a-v4-0-config-user-template-provider-selection\") pod \"87454cc1-5175-495b-9551-a19474d51e4a\" (UID: \"87454cc1-5175-495b-9551-a19474d51e4a\") " Sep 30 12:24:34 crc kubenswrapper[5002]: I0930 12:24:34.071822 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/87454cc1-5175-495b-9551-a19474d51e4a-v4-0-config-user-template-error\") pod \"87454cc1-5175-495b-9551-a19474d51e4a\" (UID: \"87454cc1-5175-495b-9551-a19474d51e4a\") " Sep 30 12:24:34 crc kubenswrapper[5002]: I0930 12:24:34.072000 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/095e48d0-c414-4007-af17-1e82ee8ac6b7-v4-0-config-system-service-ca\") pod \"oauth-openshift-575cc5b957-7nnvr\" (UID: \"095e48d0-c414-4007-af17-1e82ee8ac6b7\") " pod="openshift-authentication/oauth-openshift-575cc5b957-7nnvr" Sep 30 12:24:34 crc kubenswrapper[5002]: I0930 12:24:34.072057 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/095e48d0-c414-4007-af17-1e82ee8ac6b7-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-575cc5b957-7nnvr\" (UID: \"095e48d0-c414-4007-af17-1e82ee8ac6b7\") " pod="openshift-authentication/oauth-openshift-575cc5b957-7nnvr" Sep 30 12:24:34 crc kubenswrapper[5002]: I0930 12:24:34.072108 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/095e48d0-c414-4007-af17-1e82ee8ac6b7-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-575cc5b957-7nnvr\" (UID: \"095e48d0-c414-4007-af17-1e82ee8ac6b7\") " pod="openshift-authentication/oauth-openshift-575cc5b957-7nnvr" Sep 30 12:24:34 crc kubenswrapper[5002]: I0930 12:24:34.072126 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/87454cc1-5175-495b-9551-a19474d51e4a-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "87454cc1-5175-495b-9551-a19474d51e4a" (UID: "87454cc1-5175-495b-9551-a19474d51e4a"). InnerVolumeSpecName "v4-0-config-system-cliconfig". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 12:24:34 crc kubenswrapper[5002]: I0930 12:24:34.072163 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/095e48d0-c414-4007-af17-1e82ee8ac6b7-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-575cc5b957-7nnvr\" (UID: \"095e48d0-c414-4007-af17-1e82ee8ac6b7\") " pod="openshift-authentication/oauth-openshift-575cc5b957-7nnvr" Sep 30 12:24:34 crc kubenswrapper[5002]: I0930 12:24:34.072236 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/095e48d0-c414-4007-af17-1e82ee8ac6b7-audit-policies\") pod \"oauth-openshift-575cc5b957-7nnvr\" (UID: \"095e48d0-c414-4007-af17-1e82ee8ac6b7\") " pod="openshift-authentication/oauth-openshift-575cc5b957-7nnvr" Sep 30 12:24:34 crc kubenswrapper[5002]: I0930 12:24:34.072286 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/095e48d0-c414-4007-af17-1e82ee8ac6b7-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-575cc5b957-7nnvr\" (UID: \"095e48d0-c414-4007-af17-1e82ee8ac6b7\") " pod="openshift-authentication/oauth-openshift-575cc5b957-7nnvr" Sep 30 12:24:34 crc kubenswrapper[5002]: I0930 12:24:34.072350 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/095e48d0-c414-4007-af17-1e82ee8ac6b7-v4-0-config-system-router-certs\") pod \"oauth-openshift-575cc5b957-7nnvr\" (UID: \"095e48d0-c414-4007-af17-1e82ee8ac6b7\") " pod="openshift-authentication/oauth-openshift-575cc5b957-7nnvr" Sep 30 12:24:34 crc kubenswrapper[5002]: I0930 12:24:34.072412 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/095e48d0-c414-4007-af17-1e82ee8ac6b7-v4-0-config-system-serving-cert\") pod \"oauth-openshift-575cc5b957-7nnvr\" (UID: \"095e48d0-c414-4007-af17-1e82ee8ac6b7\") " pod="openshift-authentication/oauth-openshift-575cc5b957-7nnvr" Sep 30 12:24:34 crc kubenswrapper[5002]: I0930 12:24:34.072459 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/095e48d0-c414-4007-af17-1e82ee8ac6b7-audit-dir\") pod \"oauth-openshift-575cc5b957-7nnvr\" (UID: \"095e48d0-c414-4007-af17-1e82ee8ac6b7\") " pod="openshift-authentication/oauth-openshift-575cc5b957-7nnvr" Sep 30 12:24:34 crc kubenswrapper[5002]: I0930 12:24:34.072571 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/095e48d0-c414-4007-af17-1e82ee8ac6b7-v4-0-config-user-template-error\") pod \"oauth-openshift-575cc5b957-7nnvr\" (UID: \"095e48d0-c414-4007-af17-1e82ee8ac6b7\") " pod="openshift-authentication/oauth-openshift-575cc5b957-7nnvr" Sep 30 12:24:34 crc kubenswrapper[5002]: I0930 12:24:34.072599 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/87454cc1-5175-495b-9551-a19474d51e4a-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "87454cc1-5175-495b-9551-a19474d51e4a" (UID: "87454cc1-5175-495b-9551-a19474d51e4a"). 
InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 12:24:34 crc kubenswrapper[5002]: I0930 12:24:34.072620 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/095e48d0-c414-4007-af17-1e82ee8ac6b7-v4-0-config-user-template-login\") pod \"oauth-openshift-575cc5b957-7nnvr\" (UID: \"095e48d0-c414-4007-af17-1e82ee8ac6b7\") " pod="openshift-authentication/oauth-openshift-575cc5b957-7nnvr" Sep 30 12:24:34 crc kubenswrapper[5002]: I0930 12:24:34.072713 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dltsh\" (UniqueName: \"kubernetes.io/projected/095e48d0-c414-4007-af17-1e82ee8ac6b7-kube-api-access-dltsh\") pod \"oauth-openshift-575cc5b957-7nnvr\" (UID: \"095e48d0-c414-4007-af17-1e82ee8ac6b7\") " pod="openshift-authentication/oauth-openshift-575cc5b957-7nnvr" Sep 30 12:24:34 crc kubenswrapper[5002]: I0930 12:24:34.072771 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/095e48d0-c414-4007-af17-1e82ee8ac6b7-v4-0-config-system-cliconfig\") pod \"oauth-openshift-575cc5b957-7nnvr\" (UID: \"095e48d0-c414-4007-af17-1e82ee8ac6b7\") " pod="openshift-authentication/oauth-openshift-575cc5b957-7nnvr" Sep 30 12:24:34 crc kubenswrapper[5002]: I0930 12:24:34.072821 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/095e48d0-c414-4007-af17-1e82ee8ac6b7-v4-0-config-system-session\") pod \"oauth-openshift-575cc5b957-7nnvr\" (UID: \"095e48d0-c414-4007-af17-1e82ee8ac6b7\") " pod="openshift-authentication/oauth-openshift-575cc5b957-7nnvr" Sep 30 12:24:34 crc kubenswrapper[5002]: I0930 12:24:34.072951 5002 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/87454cc1-5175-495b-9551-a19474d51e4a-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Sep 30 12:24:34 crc kubenswrapper[5002]: I0930 12:24:34.072986 5002 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/87454cc1-5175-495b-9551-a19474d51e4a-audit-policies\") on node \"crc\" DevicePath \"\"" Sep 30 12:24:34 crc kubenswrapper[5002]: I0930 12:24:34.073019 5002 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/87454cc1-5175-495b-9551-a19474d51e4a-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 12:24:34 crc kubenswrapper[5002]: I0930 12:24:34.073048 5002 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/87454cc1-5175-495b-9551-a19474d51e4a-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Sep 30 12:24:34 crc kubenswrapper[5002]: I0930 12:24:34.073079 5002 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/87454cc1-5175-495b-9551-a19474d51e4a-audit-dir\") on node \"crc\" DevicePath \"\"" Sep 30 12:24:34 crc kubenswrapper[5002]: I0930 12:24:34.073553 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xtlqs\" (UniqueName: \"kubernetes.io/projected/87454cc1-5175-495b-9551-a19474d51e4a-kube-api-access-xtlqs\") on node 
\"crc\" DevicePath \"\"" Sep 30 12:24:34 crc kubenswrapper[5002]: I0930 12:24:34.073595 5002 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/87454cc1-5175-495b-9551-a19474d51e4a-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Sep 30 12:24:34 crc kubenswrapper[5002]: I0930 12:24:34.073623 5002 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/87454cc1-5175-495b-9551-a19474d51e4a-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Sep 30 12:24:34 crc kubenswrapper[5002]: I0930 12:24:34.073048 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/095e48d0-c414-4007-af17-1e82ee8ac6b7-v4-0-config-system-service-ca\") pod \"oauth-openshift-575cc5b957-7nnvr\" (UID: \"095e48d0-c414-4007-af17-1e82ee8ac6b7\") " pod="openshift-authentication/oauth-openshift-575cc5b957-7nnvr" Sep 30 12:24:34 crc kubenswrapper[5002]: I0930 12:24:34.076879 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/87454cc1-5175-495b-9551-a19474d51e4a-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "87454cc1-5175-495b-9551-a19474d51e4a" (UID: "87454cc1-5175-495b-9551-a19474d51e4a"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:24:34 crc kubenswrapper[5002]: I0930 12:24:34.077851 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/87454cc1-5175-495b-9551-a19474d51e4a-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "87454cc1-5175-495b-9551-a19474d51e4a" (UID: "87454cc1-5175-495b-9551-a19474d51e4a"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:24:34 crc kubenswrapper[5002]: I0930 12:24:34.078103 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/095e48d0-c414-4007-af17-1e82ee8ac6b7-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-575cc5b957-7nnvr\" (UID: \"095e48d0-c414-4007-af17-1e82ee8ac6b7\") " pod="openshift-authentication/oauth-openshift-575cc5b957-7nnvr" Sep 30 12:24:34 crc kubenswrapper[5002]: I0930 12:24:34.078295 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/095e48d0-c414-4007-af17-1e82ee8ac6b7-audit-dir\") pod \"oauth-openshift-575cc5b957-7nnvr\" (UID: \"095e48d0-c414-4007-af17-1e82ee8ac6b7\") " pod="openshift-authentication/oauth-openshift-575cc5b957-7nnvr" Sep 30 12:24:34 crc kubenswrapper[5002]: I0930 12:24:34.078692 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/87454cc1-5175-495b-9551-a19474d51e4a-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "87454cc1-5175-495b-9551-a19474d51e4a" (UID: "87454cc1-5175-495b-9551-a19474d51e4a"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:24:34 crc kubenswrapper[5002]: I0930 12:24:34.079752 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/87454cc1-5175-495b-9551-a19474d51e4a-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "87454cc1-5175-495b-9551-a19474d51e4a" (UID: "87454cc1-5175-495b-9551-a19474d51e4a"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:24:34 crc kubenswrapper[5002]: I0930 12:24:34.080204 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/095e48d0-c414-4007-af17-1e82ee8ac6b7-audit-policies\") pod \"oauth-openshift-575cc5b957-7nnvr\" (UID: \"095e48d0-c414-4007-af17-1e82ee8ac6b7\") " pod="openshift-authentication/oauth-openshift-575cc5b957-7nnvr" Sep 30 12:24:34 crc kubenswrapper[5002]: I0930 12:24:34.080740 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/095e48d0-c414-4007-af17-1e82ee8ac6b7-v4-0-config-system-cliconfig\") pod \"oauth-openshift-575cc5b957-7nnvr\" (UID: \"095e48d0-c414-4007-af17-1e82ee8ac6b7\") " pod="openshift-authentication/oauth-openshift-575cc5b957-7nnvr" Sep 30 12:24:34 crc kubenswrapper[5002]: I0930 12:24:34.082212 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/87454cc1-5175-495b-9551-a19474d51e4a-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "87454cc1-5175-495b-9551-a19474d51e4a" (UID: "87454cc1-5175-495b-9551-a19474d51e4a"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:24:34 crc kubenswrapper[5002]: I0930 12:24:34.082278 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/87454cc1-5175-495b-9551-a19474d51e4a-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "87454cc1-5175-495b-9551-a19474d51e4a" (UID: "87454cc1-5175-495b-9551-a19474d51e4a"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:24:34 crc kubenswrapper[5002]: I0930 12:24:34.082842 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/095e48d0-c414-4007-af17-1e82ee8ac6b7-v4-0-config-system-router-certs\") pod \"oauth-openshift-575cc5b957-7nnvr\" (UID: \"095e48d0-c414-4007-af17-1e82ee8ac6b7\") " pod="openshift-authentication/oauth-openshift-575cc5b957-7nnvr" Sep 30 12:24:34 crc kubenswrapper[5002]: I0930 12:24:34.083121 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/095e48d0-c414-4007-af17-1e82ee8ac6b7-v4-0-config-system-serving-cert\") pod \"oauth-openshift-575cc5b957-7nnvr\" (UID: \"095e48d0-c414-4007-af17-1e82ee8ac6b7\") " pod="openshift-authentication/oauth-openshift-575cc5b957-7nnvr" Sep 30 12:24:34 crc kubenswrapper[5002]: I0930 12:24:34.083264 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/095e48d0-c414-4007-af17-1e82ee8ac6b7-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-575cc5b957-7nnvr\" (UID: \"095e48d0-c414-4007-af17-1e82ee8ac6b7\") " pod="openshift-authentication/oauth-openshift-575cc5b957-7nnvr" Sep 30 12:24:34 crc kubenswrapper[5002]: I0930 12:24:34.083838 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/095e48d0-c414-4007-af17-1e82ee8ac6b7-v4-0-config-system-session\") pod \"oauth-openshift-575cc5b957-7nnvr\" (UID: \"095e48d0-c414-4007-af17-1e82ee8ac6b7\") " pod="openshift-authentication/oauth-openshift-575cc5b957-7nnvr" Sep 30 12:24:34 crc kubenswrapper[5002]: I0930 12:24:34.084618 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/095e48d0-c414-4007-af17-1e82ee8ac6b7-v4-0-config-user-template-error\") pod \"oauth-openshift-575cc5b957-7nnvr\" (UID: \"095e48d0-c414-4007-af17-1e82ee8ac6b7\") " pod="openshift-authentication/oauth-openshift-575cc5b957-7nnvr" Sep 30 12:24:34 crc kubenswrapper[5002]: I0930 12:24:34.084767 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/095e48d0-c414-4007-af17-1e82ee8ac6b7-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-575cc5b957-7nnvr\" (UID: \"095e48d0-c414-4007-af17-1e82ee8ac6b7\") " pod="openshift-authentication/oauth-openshift-575cc5b957-7nnvr" Sep 30 12:24:34 crc kubenswrapper[5002]: I0930 12:24:34.085232 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/095e48d0-c414-4007-af17-1e82ee8ac6b7-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-575cc5b957-7nnvr\" (UID: \"095e48d0-c414-4007-af17-1e82ee8ac6b7\") " pod="openshift-authentication/oauth-openshift-575cc5b957-7nnvr" Sep 30 12:24:34 crc kubenswrapper[5002]: I0930 12:24:34.086203 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/095e48d0-c414-4007-af17-1e82ee8ac6b7-v4-0-config-user-template-login\") pod \"oauth-openshift-575cc5b957-7nnvr\" (UID: \"095e48d0-c414-4007-af17-1e82ee8ac6b7\") " 
pod="openshift-authentication/oauth-openshift-575cc5b957-7nnvr" Sep 30 12:24:34 crc kubenswrapper[5002]: I0930 12:24:34.103515 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dltsh\" (UniqueName: \"kubernetes.io/projected/095e48d0-c414-4007-af17-1e82ee8ac6b7-kube-api-access-dltsh\") pod \"oauth-openshift-575cc5b957-7nnvr\" (UID: \"095e48d0-c414-4007-af17-1e82ee8ac6b7\") " pod="openshift-authentication/oauth-openshift-575cc5b957-7nnvr" Sep 30 12:24:34 crc kubenswrapper[5002]: I0930 12:24:34.175291 5002 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/87454cc1-5175-495b-9551-a19474d51e4a-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Sep 30 12:24:34 crc kubenswrapper[5002]: I0930 12:24:34.176146 5002 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/87454cc1-5175-495b-9551-a19474d51e4a-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 30 12:24:34 crc kubenswrapper[5002]: I0930 12:24:34.176368 5002 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/87454cc1-5175-495b-9551-a19474d51e4a-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Sep 30 12:24:34 crc kubenswrapper[5002]: I0930 12:24:34.176653 5002 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/87454cc1-5175-495b-9551-a19474d51e4a-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Sep 30 12:24:34 crc kubenswrapper[5002]: I0930 12:24:34.176887 5002 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/87454cc1-5175-495b-9551-a19474d51e4a-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Sep 30 12:24:34 crc kubenswrapper[5002]: I0930 12:24:34.177082 5002 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/87454cc1-5175-495b-9551-a19474d51e4a-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Sep 30 12:24:34 crc kubenswrapper[5002]: I0930 12:24:34.224719 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-575cc5b957-7nnvr" Sep 30 12:24:34 crc kubenswrapper[5002]: I0930 12:24:34.472322 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-575cc5b957-7nnvr"] Sep 30 12:24:34 crc kubenswrapper[5002]: I0930 12:24:34.588221 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-575cc5b957-7nnvr" event={"ID":"095e48d0-c414-4007-af17-1e82ee8ac6b7","Type":"ContainerStarted","Data":"50ddc5a4e807dc7a97e006f046f15a631f514a1220b4ff4ed6ec6e2004a83bec"} Sep 30 12:24:34 crc kubenswrapper[5002]: I0930 12:24:34.590633 5002 generic.go:334] "Generic (PLEG): container finished" podID="87454cc1-5175-495b-9551-a19474d51e4a" containerID="f0ef6708f1f414ffd0dfc09c2763631d0ef154c8d6a5db080fed97bc7ee6ea04" exitCode=0 Sep 30 12:24:34 crc kubenswrapper[5002]: I0930 12:24:34.590686 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-wk84c" event={"ID":"87454cc1-5175-495b-9551-a19474d51e4a","Type":"ContainerDied","Data":"f0ef6708f1f414ffd0dfc09c2763631d0ef154c8d6a5db080fed97bc7ee6ea04"} Sep 30 12:24:34 crc kubenswrapper[5002]: I0930 12:24:34.590719 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-wk84c" event={"ID":"87454cc1-5175-495b-9551-a19474d51e4a","Type":"ContainerDied","Data":"67210a5e5e2f75d82525924fe0045c80e621408d5844dbe0f814c4fd9ace1eaf"} Sep 30 12:24:34 crc kubenswrapper[5002]: I0930 12:24:34.590739 5002 scope.go:117] "RemoveContainer" containerID="f0ef6708f1f414ffd0dfc09c2763631d0ef154c8d6a5db080fed97bc7ee6ea04" Sep 30 12:24:34 crc kubenswrapper[5002]: I0930 12:24:34.590693 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-wk84c" Sep 30 12:24:34 crc kubenswrapper[5002]: I0930 12:24:34.640745 5002 scope.go:117] "RemoveContainer" containerID="f0ef6708f1f414ffd0dfc09c2763631d0ef154c8d6a5db080fed97bc7ee6ea04" Sep 30 12:24:34 crc kubenswrapper[5002]: E0930 12:24:34.641172 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f0ef6708f1f414ffd0dfc09c2763631d0ef154c8d6a5db080fed97bc7ee6ea04\": container with ID starting with f0ef6708f1f414ffd0dfc09c2763631d0ef154c8d6a5db080fed97bc7ee6ea04 not found: ID does not exist" containerID="f0ef6708f1f414ffd0dfc09c2763631d0ef154c8d6a5db080fed97bc7ee6ea04" Sep 30 12:24:34 crc kubenswrapper[5002]: I0930 12:24:34.641209 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f0ef6708f1f414ffd0dfc09c2763631d0ef154c8d6a5db080fed97bc7ee6ea04"} err="failed to get container status \"f0ef6708f1f414ffd0dfc09c2763631d0ef154c8d6a5db080fed97bc7ee6ea04\": rpc error: code = NotFound desc = could not find container \"f0ef6708f1f414ffd0dfc09c2763631d0ef154c8d6a5db080fed97bc7ee6ea04\": container with ID starting with f0ef6708f1f414ffd0dfc09c2763631d0ef154c8d6a5db080fed97bc7ee6ea04 not found: ID does not exist" Sep 30 12:24:34 crc kubenswrapper[5002]: I0930 12:24:34.668121 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-wk84c"] Sep 30 12:24:34 crc kubenswrapper[5002]: I0930 12:24:34.670700 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-wk84c"] Sep 30 12:24:34 crc kubenswrapper[5002]: I0930 12:24:34.683538 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="87454cc1-5175-495b-9551-a19474d51e4a" path="/var/lib/kubelet/pods/87454cc1-5175-495b-9551-a19474d51e4a/volumes" Sep 30 12:24:35 crc kubenswrapper[5002]: I0930 12:24:35.604796 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-575cc5b957-7nnvr" event={"ID":"095e48d0-c414-4007-af17-1e82ee8ac6b7","Type":"ContainerStarted","Data":"84151b61d120dbf5ac6b249db7ed78ef2f15b9c426b7eb28a2b120e30b663a04"} Sep 30 12:24:35 crc kubenswrapper[5002]: I0930 12:24:35.605125 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-575cc5b957-7nnvr" Sep 30 12:24:35 crc kubenswrapper[5002]: I0930 12:24:35.611640 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-575cc5b957-7nnvr" Sep 30 12:24:35 crc kubenswrapper[5002]: I0930 12:24:35.635230 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-575cc5b957-7nnvr" podStartSLOduration=28.635211389 podStartE2EDuration="28.635211389s" podCreationTimestamp="2025-09-30 12:24:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 12:24:35.631865695 +0000 UTC m=+249.881547841" watchObservedRunningTime="2025-09-30 12:24:35.635211389 +0000 UTC m=+249.884893535" Sep 30 12:24:55 crc kubenswrapper[5002]: I0930 12:24:55.506168 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-ccfzv"] Sep 30 12:24:55 crc kubenswrapper[5002]: I0930 12:24:55.506998 5002 kuberuntime_container.go:808] "Killing container with 
a grace period" pod="openshift-marketplace/certified-operators-ccfzv" podUID="1c74574a-77fc-4f9f-93ff-ff2b64f27312" containerName="registry-server" containerID="cri-o://22e6d1ee232fc55ac08b464dd995aee3fdc0646bde92ee9db0508e19a4a0aa1b" gracePeriod=30 Sep 30 12:24:55 crc kubenswrapper[5002]: I0930 12:24:55.515491 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-5w8h8"] Sep 30 12:24:55 crc kubenswrapper[5002]: I0930 12:24:55.516051 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-5w8h8" podUID="00884b54-ca12-4a91-a28b-fcd78a870b68" containerName="registry-server" containerID="cri-o://3d9aa45d8dfc0b444319a64ff324caf2387c6fe5026294502cd422f5962386b7" gracePeriod=30 Sep 30 12:24:55 crc kubenswrapper[5002]: I0930 12:24:55.528184 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-hc84k"] Sep 30 12:24:55 crc kubenswrapper[5002]: I0930 12:24:55.528437 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/marketplace-operator-79b997595-hc84k" podUID="acdc25a7-3353-430f-b856-22a1259025ee" containerName="marketplace-operator" containerID="cri-o://8c0fd80db2f419455eeefa0ccffbd4839f4c68065259cdc4c07a58d4e26d5d31" gracePeriod=30 Sep 30 12:24:55 crc kubenswrapper[5002]: I0930 12:24:55.539536 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-fr4sd"] Sep 30 12:24:55 crc kubenswrapper[5002]: I0930 12:24:55.540049 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-fr4sd" podUID="7a6520d7-b4d9-4837-a574-48cb86ee6231" containerName="registry-server" containerID="cri-o://607c13ea530d1231d27bd3a7b78bc5138fdfed7e6787463c85338519c1f8a3ab" gracePeriod=30 Sep 30 12:24:55 crc kubenswrapper[5002]: I0930 12:24:55.554665 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-rhw4l"] Sep 30 12:24:55 crc kubenswrapper[5002]: I0930 12:24:55.554906 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-rhw4l" podUID="30cf48ad-b568-4511-9877-bad2837d969e" containerName="registry-server" containerID="cri-o://477e0350dfa79121d49c81cf03f6c77e4d09b78d1438b5663732eef0e250f271" gracePeriod=30 Sep 30 12:24:55 crc kubenswrapper[5002]: I0930 12:24:55.560329 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-hrqrc"] Sep 30 12:24:55 crc kubenswrapper[5002]: I0930 12:24:55.561589 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-hrqrc" Sep 30 12:24:55 crc kubenswrapper[5002]: I0930 12:24:55.569678 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/75fd37d6-6c7a-4c39-8ed0-c9ba6caa2b7f-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-hrqrc\" (UID: \"75fd37d6-6c7a-4c39-8ed0-c9ba6caa2b7f\") " pod="openshift-marketplace/marketplace-operator-79b997595-hrqrc" Sep 30 12:24:55 crc kubenswrapper[5002]: I0930 12:24:55.569995 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jqgs4\" (UniqueName: \"kubernetes.io/projected/75fd37d6-6c7a-4c39-8ed0-c9ba6caa2b7f-kube-api-access-jqgs4\") pod \"marketplace-operator-79b997595-hrqrc\" (UID: \"75fd37d6-6c7a-4c39-8ed0-c9ba6caa2b7f\") " pod="openshift-marketplace/marketplace-operator-79b997595-hrqrc" Sep 30 12:24:55 crc kubenswrapper[5002]: I0930 12:24:55.570569 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/75fd37d6-6c7a-4c39-8ed0-c9ba6caa2b7f-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-hrqrc\" (UID: \"75fd37d6-6c7a-4c39-8ed0-c9ba6caa2b7f\") " pod="openshift-marketplace/marketplace-operator-79b997595-hrqrc" Sep 30 12:24:55 crc kubenswrapper[5002]: I0930 12:24:55.577513 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-hrqrc"] Sep 30 12:24:55 crc kubenswrapper[5002]: I0930 12:24:55.672033 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/75fd37d6-6c7a-4c39-8ed0-c9ba6caa2b7f-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-hrqrc\" (UID: \"75fd37d6-6c7a-4c39-8ed0-c9ba6caa2b7f\") " pod="openshift-marketplace/marketplace-operator-79b997595-hrqrc" Sep 30 12:24:55 crc kubenswrapper[5002]: I0930 12:24:55.672117 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/75fd37d6-6c7a-4c39-8ed0-c9ba6caa2b7f-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-hrqrc\" (UID: \"75fd37d6-6c7a-4c39-8ed0-c9ba6caa2b7f\") " pod="openshift-marketplace/marketplace-operator-79b997595-hrqrc" Sep 30 12:24:55 crc kubenswrapper[5002]: I0930 12:24:55.672146 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jqgs4\" (UniqueName: \"kubernetes.io/projected/75fd37d6-6c7a-4c39-8ed0-c9ba6caa2b7f-kube-api-access-jqgs4\") pod \"marketplace-operator-79b997595-hrqrc\" (UID: \"75fd37d6-6c7a-4c39-8ed0-c9ba6caa2b7f\") " pod="openshift-marketplace/marketplace-operator-79b997595-hrqrc" Sep 30 12:24:55 crc kubenswrapper[5002]: I0930 12:24:55.673986 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/75fd37d6-6c7a-4c39-8ed0-c9ba6caa2b7f-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-hrqrc\" (UID: \"75fd37d6-6c7a-4c39-8ed0-c9ba6caa2b7f\") " pod="openshift-marketplace/marketplace-operator-79b997595-hrqrc" Sep 30 12:24:55 crc kubenswrapper[5002]: I0930 12:24:55.678841 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: 
\"kubernetes.io/secret/75fd37d6-6c7a-4c39-8ed0-c9ba6caa2b7f-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-hrqrc\" (UID: \"75fd37d6-6c7a-4c39-8ed0-c9ba6caa2b7f\") " pod="openshift-marketplace/marketplace-operator-79b997595-hrqrc" Sep 30 12:24:55 crc kubenswrapper[5002]: I0930 12:24:55.690689 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jqgs4\" (UniqueName: \"kubernetes.io/projected/75fd37d6-6c7a-4c39-8ed0-c9ba6caa2b7f-kube-api-access-jqgs4\") pod \"marketplace-operator-79b997595-hrqrc\" (UID: \"75fd37d6-6c7a-4c39-8ed0-c9ba6caa2b7f\") " pod="openshift-marketplace/marketplace-operator-79b997595-hrqrc" Sep 30 12:24:55 crc kubenswrapper[5002]: I0930 12:24:55.734085 5002 generic.go:334] "Generic (PLEG): container finished" podID="7a6520d7-b4d9-4837-a574-48cb86ee6231" containerID="607c13ea530d1231d27bd3a7b78bc5138fdfed7e6787463c85338519c1f8a3ab" exitCode=0 Sep 30 12:24:55 crc kubenswrapper[5002]: I0930 12:24:55.734358 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-fr4sd" event={"ID":"7a6520d7-b4d9-4837-a574-48cb86ee6231","Type":"ContainerDied","Data":"607c13ea530d1231d27bd3a7b78bc5138fdfed7e6787463c85338519c1f8a3ab"} Sep 30 12:24:55 crc kubenswrapper[5002]: I0930 12:24:55.738123 5002 generic.go:334] "Generic (PLEG): container finished" podID="acdc25a7-3353-430f-b856-22a1259025ee" containerID="8c0fd80db2f419455eeefa0ccffbd4839f4c68065259cdc4c07a58d4e26d5d31" exitCode=0 Sep 30 12:24:55 crc kubenswrapper[5002]: I0930 12:24:55.738195 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-hc84k" event={"ID":"acdc25a7-3353-430f-b856-22a1259025ee","Type":"ContainerDied","Data":"8c0fd80db2f419455eeefa0ccffbd4839f4c68065259cdc4c07a58d4e26d5d31"} Sep 30 12:24:55 crc kubenswrapper[5002]: I0930 12:24:55.743340 5002 generic.go:334] "Generic (PLEG): container finished" podID="00884b54-ca12-4a91-a28b-fcd78a870b68" containerID="3d9aa45d8dfc0b444319a64ff324caf2387c6fe5026294502cd422f5962386b7" exitCode=0 Sep 30 12:24:55 crc kubenswrapper[5002]: I0930 12:24:55.743368 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-5w8h8" event={"ID":"00884b54-ca12-4a91-a28b-fcd78a870b68","Type":"ContainerDied","Data":"3d9aa45d8dfc0b444319a64ff324caf2387c6fe5026294502cd422f5962386b7"} Sep 30 12:24:55 crc kubenswrapper[5002]: I0930 12:24:55.747334 5002 generic.go:334] "Generic (PLEG): container finished" podID="1c74574a-77fc-4f9f-93ff-ff2b64f27312" containerID="22e6d1ee232fc55ac08b464dd995aee3fdc0646bde92ee9db0508e19a4a0aa1b" exitCode=0 Sep 30 12:24:55 crc kubenswrapper[5002]: I0930 12:24:55.747412 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-ccfzv" event={"ID":"1c74574a-77fc-4f9f-93ff-ff2b64f27312","Type":"ContainerDied","Data":"22e6d1ee232fc55ac08b464dd995aee3fdc0646bde92ee9db0508e19a4a0aa1b"} Sep 30 12:24:55 crc kubenswrapper[5002]: I0930 12:24:55.751776 5002 generic.go:334] "Generic (PLEG): container finished" podID="30cf48ad-b568-4511-9877-bad2837d969e" containerID="477e0350dfa79121d49c81cf03f6c77e4d09b78d1438b5663732eef0e250f271" exitCode=0 Sep 30 12:24:55 crc kubenswrapper[5002]: I0930 12:24:55.751863 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rhw4l" 
event={"ID":"30cf48ad-b568-4511-9877-bad2837d969e","Type":"ContainerDied","Data":"477e0350dfa79121d49c81cf03f6c77e4d09b78d1438b5663732eef0e250f271"} Sep 30 12:24:55 crc kubenswrapper[5002]: I0930 12:24:55.878693 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-hrqrc" Sep 30 12:24:55 crc kubenswrapper[5002]: I0930 12:24:55.978831 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-5w8h8" Sep 30 12:24:55 crc kubenswrapper[5002]: I0930 12:24:55.990821 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-ccfzv" Sep 30 12:24:55 crc kubenswrapper[5002]: I0930 12:24:55.991746 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-hc84k" Sep 30 12:24:56 crc kubenswrapper[5002]: I0930 12:24:56.005658 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-fr4sd" Sep 30 12:24:56 crc kubenswrapper[5002]: I0930 12:24:56.011497 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-rhw4l" Sep 30 12:24:56 crc kubenswrapper[5002]: I0930 12:24:56.077860 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7a6520d7-b4d9-4837-a574-48cb86ee6231-utilities\") pod \"7a6520d7-b4d9-4837-a574-48cb86ee6231\" (UID: \"7a6520d7-b4d9-4837-a574-48cb86ee6231\") " Sep 30 12:24:56 crc kubenswrapper[5002]: I0930 12:24:56.077902 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/30cf48ad-b568-4511-9877-bad2837d969e-catalog-content\") pod \"30cf48ad-b568-4511-9877-bad2837d969e\" (UID: \"30cf48ad-b568-4511-9877-bad2837d969e\") " Sep 30 12:24:56 crc kubenswrapper[5002]: I0930 12:24:56.077926 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wcdgq\" (UniqueName: \"kubernetes.io/projected/acdc25a7-3353-430f-b856-22a1259025ee-kube-api-access-wcdgq\") pod \"acdc25a7-3353-430f-b856-22a1259025ee\" (UID: \"acdc25a7-3353-430f-b856-22a1259025ee\") " Sep 30 12:24:56 crc kubenswrapper[5002]: I0930 12:24:56.077945 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/acdc25a7-3353-430f-b856-22a1259025ee-marketplace-operator-metrics\") pod \"acdc25a7-3353-430f-b856-22a1259025ee\" (UID: \"acdc25a7-3353-430f-b856-22a1259025ee\") " Sep 30 12:24:56 crc kubenswrapper[5002]: I0930 12:24:56.077967 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9vglg\" (UniqueName: \"kubernetes.io/projected/30cf48ad-b568-4511-9877-bad2837d969e-kube-api-access-9vglg\") pod \"30cf48ad-b568-4511-9877-bad2837d969e\" (UID: \"30cf48ad-b568-4511-9877-bad2837d969e\") " Sep 30 12:24:56 crc kubenswrapper[5002]: I0930 12:24:56.077985 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/00884b54-ca12-4a91-a28b-fcd78a870b68-catalog-content\") pod \"00884b54-ca12-4a91-a28b-fcd78a870b68\" (UID: \"00884b54-ca12-4a91-a28b-fcd78a870b68\") " Sep 30 12:24:56 crc 
kubenswrapper[5002]: I0930 12:24:56.078021 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/00884b54-ca12-4a91-a28b-fcd78a870b68-utilities\") pod \"00884b54-ca12-4a91-a28b-fcd78a870b68\" (UID: \"00884b54-ca12-4a91-a28b-fcd78a870b68\") " Sep 30 12:24:56 crc kubenswrapper[5002]: I0930 12:24:56.078038 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7a6520d7-b4d9-4837-a574-48cb86ee6231-catalog-content\") pod \"7a6520d7-b4d9-4837-a574-48cb86ee6231\" (UID: \"7a6520d7-b4d9-4837-a574-48cb86ee6231\") " Sep 30 12:24:56 crc kubenswrapper[5002]: I0930 12:24:56.078052 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1c74574a-77fc-4f9f-93ff-ff2b64f27312-catalog-content\") pod \"1c74574a-77fc-4f9f-93ff-ff2b64f27312\" (UID: \"1c74574a-77fc-4f9f-93ff-ff2b64f27312\") " Sep 30 12:24:56 crc kubenswrapper[5002]: I0930 12:24:56.078073 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1c74574a-77fc-4f9f-93ff-ff2b64f27312-utilities\") pod \"1c74574a-77fc-4f9f-93ff-ff2b64f27312\" (UID: \"1c74574a-77fc-4f9f-93ff-ff2b64f27312\") " Sep 30 12:24:56 crc kubenswrapper[5002]: I0930 12:24:56.078089 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rf2nr\" (UniqueName: \"kubernetes.io/projected/1c74574a-77fc-4f9f-93ff-ff2b64f27312-kube-api-access-rf2nr\") pod \"1c74574a-77fc-4f9f-93ff-ff2b64f27312\" (UID: \"1c74574a-77fc-4f9f-93ff-ff2b64f27312\") " Sep 30 12:24:56 crc kubenswrapper[5002]: I0930 12:24:56.078111 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vw9t5\" (UniqueName: \"kubernetes.io/projected/7a6520d7-b4d9-4837-a574-48cb86ee6231-kube-api-access-vw9t5\") pod \"7a6520d7-b4d9-4837-a574-48cb86ee6231\" (UID: \"7a6520d7-b4d9-4837-a574-48cb86ee6231\") " Sep 30 12:24:56 crc kubenswrapper[5002]: I0930 12:24:56.078135 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/30cf48ad-b568-4511-9877-bad2837d969e-utilities\") pod \"30cf48ad-b568-4511-9877-bad2837d969e\" (UID: \"30cf48ad-b568-4511-9877-bad2837d969e\") " Sep 30 12:24:56 crc kubenswrapper[5002]: I0930 12:24:56.078154 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/acdc25a7-3353-430f-b856-22a1259025ee-marketplace-trusted-ca\") pod \"acdc25a7-3353-430f-b856-22a1259025ee\" (UID: \"acdc25a7-3353-430f-b856-22a1259025ee\") " Sep 30 12:24:56 crc kubenswrapper[5002]: I0930 12:24:56.078172 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4mplt\" (UniqueName: \"kubernetes.io/projected/00884b54-ca12-4a91-a28b-fcd78a870b68-kube-api-access-4mplt\") pod \"00884b54-ca12-4a91-a28b-fcd78a870b68\" (UID: \"00884b54-ca12-4a91-a28b-fcd78a870b68\") " Sep 30 12:24:56 crc kubenswrapper[5002]: I0930 12:24:56.078885 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/00884b54-ca12-4a91-a28b-fcd78a870b68-utilities" (OuterVolumeSpecName: "utilities") pod "00884b54-ca12-4a91-a28b-fcd78a870b68" (UID: "00884b54-ca12-4a91-a28b-fcd78a870b68"). 
InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 12:24:56 crc kubenswrapper[5002]: I0930 12:24:56.079410 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7a6520d7-b4d9-4837-a574-48cb86ee6231-utilities" (OuterVolumeSpecName: "utilities") pod "7a6520d7-b4d9-4837-a574-48cb86ee6231" (UID: "7a6520d7-b4d9-4837-a574-48cb86ee6231"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 12:24:56 crc kubenswrapper[5002]: I0930 12:24:56.080243 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/30cf48ad-b568-4511-9877-bad2837d969e-utilities" (OuterVolumeSpecName: "utilities") pod "30cf48ad-b568-4511-9877-bad2837d969e" (UID: "30cf48ad-b568-4511-9877-bad2837d969e"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 12:24:56 crc kubenswrapper[5002]: I0930 12:24:56.082895 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/acdc25a7-3353-430f-b856-22a1259025ee-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "acdc25a7-3353-430f-b856-22a1259025ee" (UID: "acdc25a7-3353-430f-b856-22a1259025ee"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 12:24:56 crc kubenswrapper[5002]: I0930 12:24:56.083813 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1c74574a-77fc-4f9f-93ff-ff2b64f27312-utilities" (OuterVolumeSpecName: "utilities") pod "1c74574a-77fc-4f9f-93ff-ff2b64f27312" (UID: "1c74574a-77fc-4f9f-93ff-ff2b64f27312"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 12:24:56 crc kubenswrapper[5002]: I0930 12:24:56.084134 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1c74574a-77fc-4f9f-93ff-ff2b64f27312-kube-api-access-rf2nr" (OuterVolumeSpecName: "kube-api-access-rf2nr") pod "1c74574a-77fc-4f9f-93ff-ff2b64f27312" (UID: "1c74574a-77fc-4f9f-93ff-ff2b64f27312"). InnerVolumeSpecName "kube-api-access-rf2nr". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 12:24:56 crc kubenswrapper[5002]: I0930 12:24:56.084194 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/00884b54-ca12-4a91-a28b-fcd78a870b68-kube-api-access-4mplt" (OuterVolumeSpecName: "kube-api-access-4mplt") pod "00884b54-ca12-4a91-a28b-fcd78a870b68" (UID: "00884b54-ca12-4a91-a28b-fcd78a870b68"). InnerVolumeSpecName "kube-api-access-4mplt". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 12:24:56 crc kubenswrapper[5002]: I0930 12:24:56.097853 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/30cf48ad-b568-4511-9877-bad2837d969e-kube-api-access-9vglg" (OuterVolumeSpecName: "kube-api-access-9vglg") pod "30cf48ad-b568-4511-9877-bad2837d969e" (UID: "30cf48ad-b568-4511-9877-bad2837d969e"). InnerVolumeSpecName "kube-api-access-9vglg". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 12:24:56 crc kubenswrapper[5002]: I0930 12:24:56.100438 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/acdc25a7-3353-430f-b856-22a1259025ee-kube-api-access-wcdgq" (OuterVolumeSpecName: "kube-api-access-wcdgq") pod "acdc25a7-3353-430f-b856-22a1259025ee" (UID: "acdc25a7-3353-430f-b856-22a1259025ee"). InnerVolumeSpecName "kube-api-access-wcdgq". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 12:24:56 crc kubenswrapper[5002]: I0930 12:24:56.102767 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7a6520d7-b4d9-4837-a574-48cb86ee6231-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "7a6520d7-b4d9-4837-a574-48cb86ee6231" (UID: "7a6520d7-b4d9-4837-a574-48cb86ee6231"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 12:24:56 crc kubenswrapper[5002]: I0930 12:24:56.104816 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/acdc25a7-3353-430f-b856-22a1259025ee-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "acdc25a7-3353-430f-b856-22a1259025ee" (UID: "acdc25a7-3353-430f-b856-22a1259025ee"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:24:56 crc kubenswrapper[5002]: I0930 12:24:56.114625 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7a6520d7-b4d9-4837-a574-48cb86ee6231-kube-api-access-vw9t5" (OuterVolumeSpecName: "kube-api-access-vw9t5") pod "7a6520d7-b4d9-4837-a574-48cb86ee6231" (UID: "7a6520d7-b4d9-4837-a574-48cb86ee6231"). InnerVolumeSpecName "kube-api-access-vw9t5". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 12:24:56 crc kubenswrapper[5002]: I0930 12:24:56.154872 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/00884b54-ca12-4a91-a28b-fcd78a870b68-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "00884b54-ca12-4a91-a28b-fcd78a870b68" (UID: "00884b54-ca12-4a91-a28b-fcd78a870b68"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 12:24:56 crc kubenswrapper[5002]: I0930 12:24:56.156137 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1c74574a-77fc-4f9f-93ff-ff2b64f27312-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1c74574a-77fc-4f9f-93ff-ff2b64f27312" (UID: "1c74574a-77fc-4f9f-93ff-ff2b64f27312"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 12:24:56 crc kubenswrapper[5002]: I0930 12:24:56.173735 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/30cf48ad-b568-4511-9877-bad2837d969e-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "30cf48ad-b568-4511-9877-bad2837d969e" (UID: "30cf48ad-b568-4511-9877-bad2837d969e"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 12:24:56 crc kubenswrapper[5002]: I0930 12:24:56.178659 5002 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/00884b54-ca12-4a91-a28b-fcd78a870b68-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 12:24:56 crc kubenswrapper[5002]: I0930 12:24:56.178695 5002 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7a6520d7-b4d9-4837-a574-48cb86ee6231-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 12:24:56 crc kubenswrapper[5002]: I0930 12:24:56.178709 5002 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1c74574a-77fc-4f9f-93ff-ff2b64f27312-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 12:24:56 crc kubenswrapper[5002]: I0930 12:24:56.178720 5002 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1c74574a-77fc-4f9f-93ff-ff2b64f27312-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 12:24:56 crc kubenswrapper[5002]: I0930 12:24:56.178731 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rf2nr\" (UniqueName: \"kubernetes.io/projected/1c74574a-77fc-4f9f-93ff-ff2b64f27312-kube-api-access-rf2nr\") on node \"crc\" DevicePath \"\"" Sep 30 12:24:56 crc kubenswrapper[5002]: I0930 12:24:56.178743 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vw9t5\" (UniqueName: \"kubernetes.io/projected/7a6520d7-b4d9-4837-a574-48cb86ee6231-kube-api-access-vw9t5\") on node \"crc\" DevicePath \"\"" Sep 30 12:24:56 crc kubenswrapper[5002]: I0930 12:24:56.178754 5002 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/30cf48ad-b568-4511-9877-bad2837d969e-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 12:24:56 crc kubenswrapper[5002]: I0930 12:24:56.178765 5002 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/acdc25a7-3353-430f-b856-22a1259025ee-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Sep 30 12:24:56 crc kubenswrapper[5002]: I0930 12:24:56.178776 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4mplt\" (UniqueName: \"kubernetes.io/projected/00884b54-ca12-4a91-a28b-fcd78a870b68-kube-api-access-4mplt\") on node \"crc\" DevicePath \"\"" Sep 30 12:24:56 crc kubenswrapper[5002]: I0930 12:24:56.178787 5002 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/30cf48ad-b568-4511-9877-bad2837d969e-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 12:24:56 crc kubenswrapper[5002]: I0930 12:24:56.178799 5002 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7a6520d7-b4d9-4837-a574-48cb86ee6231-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 12:24:56 crc kubenswrapper[5002]: I0930 12:24:56.178811 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wcdgq\" (UniqueName: \"kubernetes.io/projected/acdc25a7-3353-430f-b856-22a1259025ee-kube-api-access-wcdgq\") on node \"crc\" DevicePath \"\"" Sep 30 12:24:56 crc kubenswrapper[5002]: I0930 12:24:56.178823 5002 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: 
\"kubernetes.io/secret/acdc25a7-3353-430f-b856-22a1259025ee-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Sep 30 12:24:56 crc kubenswrapper[5002]: I0930 12:24:56.178835 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9vglg\" (UniqueName: \"kubernetes.io/projected/30cf48ad-b568-4511-9877-bad2837d969e-kube-api-access-9vglg\") on node \"crc\" DevicePath \"\"" Sep 30 12:24:56 crc kubenswrapper[5002]: I0930 12:24:56.178846 5002 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/00884b54-ca12-4a91-a28b-fcd78a870b68-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 12:24:56 crc kubenswrapper[5002]: I0930 12:24:56.316558 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-hrqrc"] Sep 30 12:24:56 crc kubenswrapper[5002]: I0930 12:24:56.758524 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-hrqrc" event={"ID":"75fd37d6-6c7a-4c39-8ed0-c9ba6caa2b7f","Type":"ContainerStarted","Data":"5f305b2950db7f618415f560b379d6844e38a613e8391d1f3cad28e9c33056b9"} Sep 30 12:24:56 crc kubenswrapper[5002]: I0930 12:24:56.759631 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-hrqrc" Sep 30 12:24:56 crc kubenswrapper[5002]: I0930 12:24:56.759653 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-hrqrc" event={"ID":"75fd37d6-6c7a-4c39-8ed0-c9ba6caa2b7f","Type":"ContainerStarted","Data":"167c7684a5a9e70563385cc1d8ec25936de1d2e0f58aa621b114675d67e356cc"} Sep 30 12:24:56 crc kubenswrapper[5002]: I0930 12:24:56.761067 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-fr4sd" event={"ID":"7a6520d7-b4d9-4837-a574-48cb86ee6231","Type":"ContainerDied","Data":"6f66e4c9389dfaf2b19fd9ef735675b9426a0ebb4d952da8d4beeb0a58c999da"} Sep 30 12:24:56 crc kubenswrapper[5002]: I0930 12:24:56.761126 5002 scope.go:117] "RemoveContainer" containerID="607c13ea530d1231d27bd3a7b78bc5138fdfed7e6787463c85338519c1f8a3ab" Sep 30 12:24:56 crc kubenswrapper[5002]: I0930 12:24:56.761085 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-fr4sd" Sep 30 12:24:56 crc kubenswrapper[5002]: I0930 12:24:56.762880 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-hc84k" event={"ID":"acdc25a7-3353-430f-b856-22a1259025ee","Type":"ContainerDied","Data":"26bf182a8d4a68ad62bf56999449f1e36c710f8ca51a232887e7e0c93a2dc57a"} Sep 30 12:24:56 crc kubenswrapper[5002]: I0930 12:24:56.762952 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-hrqrc" Sep 30 12:24:56 crc kubenswrapper[5002]: I0930 12:24:56.763021 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-hc84k" Sep 30 12:24:56 crc kubenswrapper[5002]: I0930 12:24:56.771011 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-5w8h8" event={"ID":"00884b54-ca12-4a91-a28b-fcd78a870b68","Type":"ContainerDied","Data":"878833d0673261a2dc48c8344eaf110379ab7daa366463e40a590357f77c256f"} Sep 30 12:24:56 crc kubenswrapper[5002]: I0930 12:24:56.771142 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-5w8h8" Sep 30 12:24:56 crc kubenswrapper[5002]: I0930 12:24:56.776715 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-hrqrc" podStartSLOduration=1.776691691 podStartE2EDuration="1.776691691s" podCreationTimestamp="2025-09-30 12:24:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 12:24:56.775198043 +0000 UTC m=+271.024880199" watchObservedRunningTime="2025-09-30 12:24:56.776691691 +0000 UTC m=+271.026373857" Sep 30 12:24:56 crc kubenswrapper[5002]: I0930 12:24:56.776770 5002 scope.go:117] "RemoveContainer" containerID="c6e34ba553b4dfedb6f9ef4977e4877cf1241bebf7a2873dee2f74f23eafee97" Sep 30 12:24:56 crc kubenswrapper[5002]: I0930 12:24:56.779753 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-ccfzv" event={"ID":"1c74574a-77fc-4f9f-93ff-ff2b64f27312","Type":"ContainerDied","Data":"16a472e95c5dd33216af5c89a96fb06e05e871d1082ed7f2d3406dbd2161ecc4"} Sep 30 12:24:56 crc kubenswrapper[5002]: I0930 12:24:56.780024 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-ccfzv" Sep 30 12:24:56 crc kubenswrapper[5002]: I0930 12:24:56.786846 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-fr4sd"] Sep 30 12:24:56 crc kubenswrapper[5002]: I0930 12:24:56.787433 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rhw4l" event={"ID":"30cf48ad-b568-4511-9877-bad2837d969e","Type":"ContainerDied","Data":"bb82e2a6a04df0d98864bd29604422f373266e0b0445628a4cb20502704bb9f4"} Sep 30 12:24:56 crc kubenswrapper[5002]: I0930 12:24:56.787544 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-rhw4l" Sep 30 12:24:56 crc kubenswrapper[5002]: I0930 12:24:56.789139 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-fr4sd"] Sep 30 12:24:56 crc kubenswrapper[5002]: I0930 12:24:56.793654 5002 scope.go:117] "RemoveContainer" containerID="63ffcd041f05f330eb10eacf002772c3351d4f2c9009428e298aa4adc3c8f1a9" Sep 30 12:24:56 crc kubenswrapper[5002]: I0930 12:24:56.815969 5002 scope.go:117] "RemoveContainer" containerID="8c0fd80db2f419455eeefa0ccffbd4839f4c68065259cdc4c07a58d4e26d5d31" Sep 30 12:24:56 crc kubenswrapper[5002]: I0930 12:24:56.834565 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-hc84k"] Sep 30 12:24:56 crc kubenswrapper[5002]: I0930 12:24:56.835271 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-hc84k"] Sep 30 12:24:56 crc kubenswrapper[5002]: I0930 12:24:56.840836 5002 scope.go:117] "RemoveContainer" containerID="3d9aa45d8dfc0b444319a64ff324caf2387c6fe5026294502cd422f5962386b7" Sep 30 12:24:56 crc kubenswrapper[5002]: I0930 12:24:56.862265 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-ccfzv"] Sep 30 12:24:56 crc kubenswrapper[5002]: I0930 12:24:56.875126 5002 scope.go:117] "RemoveContainer" containerID="84fc9e2b3a219e719c69528be80b2d8684add00db31fc5cd69d9bca958ce049d" Sep 30 12:24:56 crc kubenswrapper[5002]: I0930 12:24:56.879007 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-ccfzv"] Sep 30 12:24:56 crc kubenswrapper[5002]: I0930 12:24:56.884444 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-rhw4l"] Sep 30 12:24:56 crc kubenswrapper[5002]: I0930 12:24:56.885387 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-rhw4l"] Sep 30 12:24:56 crc kubenswrapper[5002]: I0930 12:24:56.891749 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-5w8h8"] Sep 30 12:24:56 crc kubenswrapper[5002]: I0930 12:24:56.895376 5002 scope.go:117] "RemoveContainer" containerID="62934988a4d75b6af15a3581f0256d0e90ff0740b44d3ab4d4d7c82fa739dda1" Sep 30 12:24:56 crc kubenswrapper[5002]: I0930 12:24:56.896763 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-5w8h8"] Sep 30 12:24:56 crc kubenswrapper[5002]: I0930 12:24:56.911830 5002 scope.go:117] "RemoveContainer" containerID="22e6d1ee232fc55ac08b464dd995aee3fdc0646bde92ee9db0508e19a4a0aa1b" Sep 30 12:24:56 crc kubenswrapper[5002]: I0930 12:24:56.924667 5002 scope.go:117] "RemoveContainer" containerID="404b2974489712b3805ca6fb2670926ce35f8948e54abd0f22d6f852e6fbde40" Sep 30 12:24:56 crc kubenswrapper[5002]: I0930 12:24:56.939259 5002 scope.go:117] "RemoveContainer" containerID="f0bfb6913d2ed978286c961c84b792b142b5768081d5240f9fd1617b85d8ad25" Sep 30 12:24:56 crc kubenswrapper[5002]: I0930 12:24:56.957383 5002 scope.go:117] "RemoveContainer" containerID="477e0350dfa79121d49c81cf03f6c77e4d09b78d1438b5663732eef0e250f271" Sep 30 12:24:56 crc kubenswrapper[5002]: I0930 12:24:56.971635 5002 scope.go:117] "RemoveContainer" containerID="d496999c76eb95bc5eaece602eb6f027a188cc9286e72cb0c7fef03de38af16b" Sep 30 12:24:56 crc kubenswrapper[5002]: I0930 12:24:56.986601 5002 scope.go:117] "RemoveContainer" 
containerID="993b97b428373e7da3ddd2d8dd55154cb30d8302cbd1bc1c64ffc2b73c37083c" Sep 30 12:24:57 crc kubenswrapper[5002]: I0930 12:24:57.726583 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-kzj7j"] Sep 30 12:24:57 crc kubenswrapper[5002]: E0930 12:24:57.727058 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7a6520d7-b4d9-4837-a574-48cb86ee6231" containerName="registry-server" Sep 30 12:24:57 crc kubenswrapper[5002]: I0930 12:24:57.727069 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="7a6520d7-b4d9-4837-a574-48cb86ee6231" containerName="registry-server" Sep 30 12:24:57 crc kubenswrapper[5002]: E0930 12:24:57.727079 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7a6520d7-b4d9-4837-a574-48cb86ee6231" containerName="extract-content" Sep 30 12:24:57 crc kubenswrapper[5002]: I0930 12:24:57.727084 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="7a6520d7-b4d9-4837-a574-48cb86ee6231" containerName="extract-content" Sep 30 12:24:57 crc kubenswrapper[5002]: E0930 12:24:57.727096 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1c74574a-77fc-4f9f-93ff-ff2b64f27312" containerName="extract-utilities" Sep 30 12:24:57 crc kubenswrapper[5002]: I0930 12:24:57.727102 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="1c74574a-77fc-4f9f-93ff-ff2b64f27312" containerName="extract-utilities" Sep 30 12:24:57 crc kubenswrapper[5002]: E0930 12:24:57.727110 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="30cf48ad-b568-4511-9877-bad2837d969e" containerName="extract-utilities" Sep 30 12:24:57 crc kubenswrapper[5002]: I0930 12:24:57.727116 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="30cf48ad-b568-4511-9877-bad2837d969e" containerName="extract-utilities" Sep 30 12:24:57 crc kubenswrapper[5002]: E0930 12:24:57.727125 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="00884b54-ca12-4a91-a28b-fcd78a870b68" containerName="extract-content" Sep 30 12:24:57 crc kubenswrapper[5002]: I0930 12:24:57.727131 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="00884b54-ca12-4a91-a28b-fcd78a870b68" containerName="extract-content" Sep 30 12:24:57 crc kubenswrapper[5002]: E0930 12:24:57.727137 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="30cf48ad-b568-4511-9877-bad2837d969e" containerName="extract-content" Sep 30 12:24:57 crc kubenswrapper[5002]: I0930 12:24:57.727143 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="30cf48ad-b568-4511-9877-bad2837d969e" containerName="extract-content" Sep 30 12:24:57 crc kubenswrapper[5002]: E0930 12:24:57.727151 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="00884b54-ca12-4a91-a28b-fcd78a870b68" containerName="extract-utilities" Sep 30 12:24:57 crc kubenswrapper[5002]: I0930 12:24:57.727157 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="00884b54-ca12-4a91-a28b-fcd78a870b68" containerName="extract-utilities" Sep 30 12:24:57 crc kubenswrapper[5002]: E0930 12:24:57.727165 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="acdc25a7-3353-430f-b856-22a1259025ee" containerName="marketplace-operator" Sep 30 12:24:57 crc kubenswrapper[5002]: I0930 12:24:57.727170 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="acdc25a7-3353-430f-b856-22a1259025ee" containerName="marketplace-operator" Sep 30 12:24:57 crc kubenswrapper[5002]: E0930 12:24:57.727178 5002 cpu_manager.go:410] "RemoveStaleState: 
removing container" podUID="1c74574a-77fc-4f9f-93ff-ff2b64f27312" containerName="extract-content" Sep 30 12:24:57 crc kubenswrapper[5002]: I0930 12:24:57.727184 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="1c74574a-77fc-4f9f-93ff-ff2b64f27312" containerName="extract-content" Sep 30 12:24:57 crc kubenswrapper[5002]: E0930 12:24:57.727192 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1c74574a-77fc-4f9f-93ff-ff2b64f27312" containerName="registry-server" Sep 30 12:24:57 crc kubenswrapper[5002]: I0930 12:24:57.727198 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="1c74574a-77fc-4f9f-93ff-ff2b64f27312" containerName="registry-server" Sep 30 12:24:57 crc kubenswrapper[5002]: E0930 12:24:57.727207 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="00884b54-ca12-4a91-a28b-fcd78a870b68" containerName="registry-server" Sep 30 12:24:57 crc kubenswrapper[5002]: I0930 12:24:57.727214 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="00884b54-ca12-4a91-a28b-fcd78a870b68" containerName="registry-server" Sep 30 12:24:57 crc kubenswrapper[5002]: E0930 12:24:57.727225 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7a6520d7-b4d9-4837-a574-48cb86ee6231" containerName="extract-utilities" Sep 30 12:24:57 crc kubenswrapper[5002]: I0930 12:24:57.727232 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="7a6520d7-b4d9-4837-a574-48cb86ee6231" containerName="extract-utilities" Sep 30 12:24:57 crc kubenswrapper[5002]: E0930 12:24:57.727242 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="30cf48ad-b568-4511-9877-bad2837d969e" containerName="registry-server" Sep 30 12:24:57 crc kubenswrapper[5002]: I0930 12:24:57.727247 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="30cf48ad-b568-4511-9877-bad2837d969e" containerName="registry-server" Sep 30 12:24:57 crc kubenswrapper[5002]: I0930 12:24:57.727334 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="1c74574a-77fc-4f9f-93ff-ff2b64f27312" containerName="registry-server" Sep 30 12:24:57 crc kubenswrapper[5002]: I0930 12:24:57.727344 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="30cf48ad-b568-4511-9877-bad2837d969e" containerName="registry-server" Sep 30 12:24:57 crc kubenswrapper[5002]: I0930 12:24:57.727355 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="acdc25a7-3353-430f-b856-22a1259025ee" containerName="marketplace-operator" Sep 30 12:24:57 crc kubenswrapper[5002]: I0930 12:24:57.727362 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="7a6520d7-b4d9-4837-a574-48cb86ee6231" containerName="registry-server" Sep 30 12:24:57 crc kubenswrapper[5002]: I0930 12:24:57.727372 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="00884b54-ca12-4a91-a28b-fcd78a870b68" containerName="registry-server" Sep 30 12:24:57 crc kubenswrapper[5002]: I0930 12:24:57.728419 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-kzj7j" Sep 30 12:24:57 crc kubenswrapper[5002]: I0930 12:24:57.730426 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Sep 30 12:24:57 crc kubenswrapper[5002]: I0930 12:24:57.734840 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-kzj7j"] Sep 30 12:24:57 crc kubenswrapper[5002]: I0930 12:24:57.802674 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/68797de5-40f1-448f-9fbb-fa3eb4adc842-catalog-content\") pod \"redhat-marketplace-kzj7j\" (UID: \"68797de5-40f1-448f-9fbb-fa3eb4adc842\") " pod="openshift-marketplace/redhat-marketplace-kzj7j" Sep 30 12:24:57 crc kubenswrapper[5002]: I0930 12:24:57.802787 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hwtd8\" (UniqueName: \"kubernetes.io/projected/68797de5-40f1-448f-9fbb-fa3eb4adc842-kube-api-access-hwtd8\") pod \"redhat-marketplace-kzj7j\" (UID: \"68797de5-40f1-448f-9fbb-fa3eb4adc842\") " pod="openshift-marketplace/redhat-marketplace-kzj7j" Sep 30 12:24:57 crc kubenswrapper[5002]: I0930 12:24:57.802852 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/68797de5-40f1-448f-9fbb-fa3eb4adc842-utilities\") pod \"redhat-marketplace-kzj7j\" (UID: \"68797de5-40f1-448f-9fbb-fa3eb4adc842\") " pod="openshift-marketplace/redhat-marketplace-kzj7j" Sep 30 12:24:57 crc kubenswrapper[5002]: I0930 12:24:57.903666 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hwtd8\" (UniqueName: \"kubernetes.io/projected/68797de5-40f1-448f-9fbb-fa3eb4adc842-kube-api-access-hwtd8\") pod \"redhat-marketplace-kzj7j\" (UID: \"68797de5-40f1-448f-9fbb-fa3eb4adc842\") " pod="openshift-marketplace/redhat-marketplace-kzj7j" Sep 30 12:24:57 crc kubenswrapper[5002]: I0930 12:24:57.903735 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/68797de5-40f1-448f-9fbb-fa3eb4adc842-utilities\") pod \"redhat-marketplace-kzj7j\" (UID: \"68797de5-40f1-448f-9fbb-fa3eb4adc842\") " pod="openshift-marketplace/redhat-marketplace-kzj7j" Sep 30 12:24:57 crc kubenswrapper[5002]: I0930 12:24:57.903780 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/68797de5-40f1-448f-9fbb-fa3eb4adc842-catalog-content\") pod \"redhat-marketplace-kzj7j\" (UID: \"68797de5-40f1-448f-9fbb-fa3eb4adc842\") " pod="openshift-marketplace/redhat-marketplace-kzj7j" Sep 30 12:24:57 crc kubenswrapper[5002]: I0930 12:24:57.905040 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/68797de5-40f1-448f-9fbb-fa3eb4adc842-utilities\") pod \"redhat-marketplace-kzj7j\" (UID: \"68797de5-40f1-448f-9fbb-fa3eb4adc842\") " pod="openshift-marketplace/redhat-marketplace-kzj7j" Sep 30 12:24:57 crc kubenswrapper[5002]: I0930 12:24:57.905271 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/68797de5-40f1-448f-9fbb-fa3eb4adc842-catalog-content\") pod \"redhat-marketplace-kzj7j\" (UID: 
\"68797de5-40f1-448f-9fbb-fa3eb4adc842\") " pod="openshift-marketplace/redhat-marketplace-kzj7j" Sep 30 12:24:57 crc kubenswrapper[5002]: I0930 12:24:57.924536 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-twns5"] Sep 30 12:24:57 crc kubenswrapper[5002]: I0930 12:24:57.925458 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-twns5" Sep 30 12:24:57 crc kubenswrapper[5002]: I0930 12:24:57.932290 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Sep 30 12:24:57 crc kubenswrapper[5002]: I0930 12:24:57.936618 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-twns5"] Sep 30 12:24:57 crc kubenswrapper[5002]: I0930 12:24:57.944168 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hwtd8\" (UniqueName: \"kubernetes.io/projected/68797de5-40f1-448f-9fbb-fa3eb4adc842-kube-api-access-hwtd8\") pod \"redhat-marketplace-kzj7j\" (UID: \"68797de5-40f1-448f-9fbb-fa3eb4adc842\") " pod="openshift-marketplace/redhat-marketplace-kzj7j" Sep 30 12:24:58 crc kubenswrapper[5002]: I0930 12:24:58.005184 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dc331b2c-354f-4cb2-9aab-2f8328781341-catalog-content\") pod \"redhat-operators-twns5\" (UID: \"dc331b2c-354f-4cb2-9aab-2f8328781341\") " pod="openshift-marketplace/redhat-operators-twns5" Sep 30 12:24:58 crc kubenswrapper[5002]: I0930 12:24:58.005234 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sbt5h\" (UniqueName: \"kubernetes.io/projected/dc331b2c-354f-4cb2-9aab-2f8328781341-kube-api-access-sbt5h\") pod \"redhat-operators-twns5\" (UID: \"dc331b2c-354f-4cb2-9aab-2f8328781341\") " pod="openshift-marketplace/redhat-operators-twns5" Sep 30 12:24:58 crc kubenswrapper[5002]: I0930 12:24:58.005331 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dc331b2c-354f-4cb2-9aab-2f8328781341-utilities\") pod \"redhat-operators-twns5\" (UID: \"dc331b2c-354f-4cb2-9aab-2f8328781341\") " pod="openshift-marketplace/redhat-operators-twns5" Sep 30 12:24:58 crc kubenswrapper[5002]: I0930 12:24:58.057307 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-kzj7j" Sep 30 12:24:58 crc kubenswrapper[5002]: I0930 12:24:58.106417 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dc331b2c-354f-4cb2-9aab-2f8328781341-catalog-content\") pod \"redhat-operators-twns5\" (UID: \"dc331b2c-354f-4cb2-9aab-2f8328781341\") " pod="openshift-marketplace/redhat-operators-twns5" Sep 30 12:24:58 crc kubenswrapper[5002]: I0930 12:24:58.106497 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sbt5h\" (UniqueName: \"kubernetes.io/projected/dc331b2c-354f-4cb2-9aab-2f8328781341-kube-api-access-sbt5h\") pod \"redhat-operators-twns5\" (UID: \"dc331b2c-354f-4cb2-9aab-2f8328781341\") " pod="openshift-marketplace/redhat-operators-twns5" Sep 30 12:24:58 crc kubenswrapper[5002]: I0930 12:24:58.106550 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dc331b2c-354f-4cb2-9aab-2f8328781341-utilities\") pod \"redhat-operators-twns5\" (UID: \"dc331b2c-354f-4cb2-9aab-2f8328781341\") " pod="openshift-marketplace/redhat-operators-twns5" Sep 30 12:24:58 crc kubenswrapper[5002]: I0930 12:24:58.108819 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dc331b2c-354f-4cb2-9aab-2f8328781341-utilities\") pod \"redhat-operators-twns5\" (UID: \"dc331b2c-354f-4cb2-9aab-2f8328781341\") " pod="openshift-marketplace/redhat-operators-twns5" Sep 30 12:24:58 crc kubenswrapper[5002]: I0930 12:24:58.109100 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dc331b2c-354f-4cb2-9aab-2f8328781341-catalog-content\") pod \"redhat-operators-twns5\" (UID: \"dc331b2c-354f-4cb2-9aab-2f8328781341\") " pod="openshift-marketplace/redhat-operators-twns5" Sep 30 12:24:58 crc kubenswrapper[5002]: I0930 12:24:58.122657 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sbt5h\" (UniqueName: \"kubernetes.io/projected/dc331b2c-354f-4cb2-9aab-2f8328781341-kube-api-access-sbt5h\") pod \"redhat-operators-twns5\" (UID: \"dc331b2c-354f-4cb2-9aab-2f8328781341\") " pod="openshift-marketplace/redhat-operators-twns5" Sep 30 12:24:58 crc kubenswrapper[5002]: I0930 12:24:58.233210 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-kzj7j"] Sep 30 12:24:58 crc kubenswrapper[5002]: I0930 12:24:58.309182 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-twns5" Sep 30 12:24:58 crc kubenswrapper[5002]: I0930 12:24:58.485744 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-twns5"] Sep 30 12:24:58 crc kubenswrapper[5002]: W0930 12:24:58.490535 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poddc331b2c_354f_4cb2_9aab_2f8328781341.slice/crio-b455ec0942fd8dbc6ffe2111b3aa78629c85e86eab78a005ef6ffd18aeaec126 WatchSource:0}: Error finding container b455ec0942fd8dbc6ffe2111b3aa78629c85e86eab78a005ef6ffd18aeaec126: Status 404 returned error can't find the container with id b455ec0942fd8dbc6ffe2111b3aa78629c85e86eab78a005ef6ffd18aeaec126 Sep 30 12:24:58 crc kubenswrapper[5002]: I0930 12:24:58.681578 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="00884b54-ca12-4a91-a28b-fcd78a870b68" path="/var/lib/kubelet/pods/00884b54-ca12-4a91-a28b-fcd78a870b68/volumes" Sep 30 12:24:58 crc kubenswrapper[5002]: I0930 12:24:58.682292 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1c74574a-77fc-4f9f-93ff-ff2b64f27312" path="/var/lib/kubelet/pods/1c74574a-77fc-4f9f-93ff-ff2b64f27312/volumes" Sep 30 12:24:58 crc kubenswrapper[5002]: I0930 12:24:58.682835 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="30cf48ad-b568-4511-9877-bad2837d969e" path="/var/lib/kubelet/pods/30cf48ad-b568-4511-9877-bad2837d969e/volumes" Sep 30 12:24:58 crc kubenswrapper[5002]: I0930 12:24:58.683810 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7a6520d7-b4d9-4837-a574-48cb86ee6231" path="/var/lib/kubelet/pods/7a6520d7-b4d9-4837-a574-48cb86ee6231/volumes" Sep 30 12:24:58 crc kubenswrapper[5002]: I0930 12:24:58.684375 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="acdc25a7-3353-430f-b856-22a1259025ee" path="/var/lib/kubelet/pods/acdc25a7-3353-430f-b856-22a1259025ee/volumes" Sep 30 12:24:58 crc kubenswrapper[5002]: I0930 12:24:58.803619 5002 generic.go:334] "Generic (PLEG): container finished" podID="68797de5-40f1-448f-9fbb-fa3eb4adc842" containerID="2331aff3d4bd5d0f2e157dcd7f357c0b4d485323791c6555e3420003a49fdcb8" exitCode=0 Sep 30 12:24:58 crc kubenswrapper[5002]: I0930 12:24:58.812443 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kzj7j" event={"ID":"68797de5-40f1-448f-9fbb-fa3eb4adc842","Type":"ContainerDied","Data":"2331aff3d4bd5d0f2e157dcd7f357c0b4d485323791c6555e3420003a49fdcb8"} Sep 30 12:24:58 crc kubenswrapper[5002]: I0930 12:24:58.812535 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kzj7j" event={"ID":"68797de5-40f1-448f-9fbb-fa3eb4adc842","Type":"ContainerStarted","Data":"60f98aebc199e5e94c1c12cccc34686be6d9c569378f471f57b70af9719eb29c"} Sep 30 12:24:58 crc kubenswrapper[5002]: I0930 12:24:58.815977 5002 generic.go:334] "Generic (PLEG): container finished" podID="dc331b2c-354f-4cb2-9aab-2f8328781341" containerID="340b42960e0f0e299f0bbec7436cdade7e733dc41df612104690452c10ee9ba0" exitCode=0 Sep 30 12:24:58 crc kubenswrapper[5002]: I0930 12:24:58.816168 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-twns5" event={"ID":"dc331b2c-354f-4cb2-9aab-2f8328781341","Type":"ContainerDied","Data":"340b42960e0f0e299f0bbec7436cdade7e733dc41df612104690452c10ee9ba0"} Sep 30 12:24:58 crc kubenswrapper[5002]: I0930 
12:24:58.816351 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-twns5" event={"ID":"dc331b2c-354f-4cb2-9aab-2f8328781341","Type":"ContainerStarted","Data":"b455ec0942fd8dbc6ffe2111b3aa78629c85e86eab78a005ef6ffd18aeaec126"} Sep 30 12:24:59 crc kubenswrapper[5002]: I0930 12:24:59.824437 5002 generic.go:334] "Generic (PLEG): container finished" podID="68797de5-40f1-448f-9fbb-fa3eb4adc842" containerID="f736901dc1d34dcfca2ff8445cc1fe8458f9350bfbf685be8c860e40501178ae" exitCode=0 Sep 30 12:24:59 crc kubenswrapper[5002]: I0930 12:24:59.824715 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kzj7j" event={"ID":"68797de5-40f1-448f-9fbb-fa3eb4adc842","Type":"ContainerDied","Data":"f736901dc1d34dcfca2ff8445cc1fe8458f9350bfbf685be8c860e40501178ae"} Sep 30 12:24:59 crc kubenswrapper[5002]: I0930 12:24:59.828762 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-twns5" event={"ID":"dc331b2c-354f-4cb2-9aab-2f8328781341","Type":"ContainerStarted","Data":"f756d0c216d0ad34b43f00d4d288d081fbe00e5a952c9151a9da49f2e6f77bec"} Sep 30 12:25:00 crc kubenswrapper[5002]: I0930 12:25:00.126045 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-p4m6q"] Sep 30 12:25:00 crc kubenswrapper[5002]: I0930 12:25:00.127208 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-p4m6q" Sep 30 12:25:00 crc kubenswrapper[5002]: I0930 12:25:00.129238 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Sep 30 12:25:00 crc kubenswrapper[5002]: I0930 12:25:00.136346 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2jv2t\" (UniqueName: \"kubernetes.io/projected/122cb276-c38c-4c29-80f9-b9e225b0a5a6-kube-api-access-2jv2t\") pod \"certified-operators-p4m6q\" (UID: \"122cb276-c38c-4c29-80f9-b9e225b0a5a6\") " pod="openshift-marketplace/certified-operators-p4m6q" Sep 30 12:25:00 crc kubenswrapper[5002]: I0930 12:25:00.136437 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/122cb276-c38c-4c29-80f9-b9e225b0a5a6-utilities\") pod \"certified-operators-p4m6q\" (UID: \"122cb276-c38c-4c29-80f9-b9e225b0a5a6\") " pod="openshift-marketplace/certified-operators-p4m6q" Sep 30 12:25:00 crc kubenswrapper[5002]: I0930 12:25:00.136457 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/122cb276-c38c-4c29-80f9-b9e225b0a5a6-catalog-content\") pod \"certified-operators-p4m6q\" (UID: \"122cb276-c38c-4c29-80f9-b9e225b0a5a6\") " pod="openshift-marketplace/certified-operators-p4m6q" Sep 30 12:25:00 crc kubenswrapper[5002]: I0930 12:25:00.137275 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-p4m6q"] Sep 30 12:25:00 crc kubenswrapper[5002]: I0930 12:25:00.237349 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/122cb276-c38c-4c29-80f9-b9e225b0a5a6-utilities\") pod \"certified-operators-p4m6q\" (UID: \"122cb276-c38c-4c29-80f9-b9e225b0a5a6\") " pod="openshift-marketplace/certified-operators-p4m6q" Sep 30 12:25:00 crc 
kubenswrapper[5002]: I0930 12:25:00.237648 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/122cb276-c38c-4c29-80f9-b9e225b0a5a6-catalog-content\") pod \"certified-operators-p4m6q\" (UID: \"122cb276-c38c-4c29-80f9-b9e225b0a5a6\") " pod="openshift-marketplace/certified-operators-p4m6q" Sep 30 12:25:00 crc kubenswrapper[5002]: I0930 12:25:00.237769 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2jv2t\" (UniqueName: \"kubernetes.io/projected/122cb276-c38c-4c29-80f9-b9e225b0a5a6-kube-api-access-2jv2t\") pod \"certified-operators-p4m6q\" (UID: \"122cb276-c38c-4c29-80f9-b9e225b0a5a6\") " pod="openshift-marketplace/certified-operators-p4m6q" Sep 30 12:25:00 crc kubenswrapper[5002]: I0930 12:25:00.237949 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/122cb276-c38c-4c29-80f9-b9e225b0a5a6-utilities\") pod \"certified-operators-p4m6q\" (UID: \"122cb276-c38c-4c29-80f9-b9e225b0a5a6\") " pod="openshift-marketplace/certified-operators-p4m6q" Sep 30 12:25:00 crc kubenswrapper[5002]: I0930 12:25:00.238395 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/122cb276-c38c-4c29-80f9-b9e225b0a5a6-catalog-content\") pod \"certified-operators-p4m6q\" (UID: \"122cb276-c38c-4c29-80f9-b9e225b0a5a6\") " pod="openshift-marketplace/certified-operators-p4m6q" Sep 30 12:25:00 crc kubenswrapper[5002]: I0930 12:25:00.260846 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2jv2t\" (UniqueName: \"kubernetes.io/projected/122cb276-c38c-4c29-80f9-b9e225b0a5a6-kube-api-access-2jv2t\") pod \"certified-operators-p4m6q\" (UID: \"122cb276-c38c-4c29-80f9-b9e225b0a5a6\") " pod="openshift-marketplace/certified-operators-p4m6q" Sep 30 12:25:00 crc kubenswrapper[5002]: I0930 12:25:00.330460 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-ssrzt"] Sep 30 12:25:00 crc kubenswrapper[5002]: I0930 12:25:00.331763 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-ssrzt" Sep 30 12:25:00 crc kubenswrapper[5002]: I0930 12:25:00.336689 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Sep 30 12:25:00 crc kubenswrapper[5002]: I0930 12:25:00.338468 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8db93be7-82da-4e11-bfd0-d5e7d804177c-utilities\") pod \"community-operators-ssrzt\" (UID: \"8db93be7-82da-4e11-bfd0-d5e7d804177c\") " pod="openshift-marketplace/community-operators-ssrzt" Sep 30 12:25:00 crc kubenswrapper[5002]: I0930 12:25:00.338551 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6nv9c\" (UniqueName: \"kubernetes.io/projected/8db93be7-82da-4e11-bfd0-d5e7d804177c-kube-api-access-6nv9c\") pod \"community-operators-ssrzt\" (UID: \"8db93be7-82da-4e11-bfd0-d5e7d804177c\") " pod="openshift-marketplace/community-operators-ssrzt" Sep 30 12:25:00 crc kubenswrapper[5002]: I0930 12:25:00.338572 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8db93be7-82da-4e11-bfd0-d5e7d804177c-catalog-content\") pod \"community-operators-ssrzt\" (UID: \"8db93be7-82da-4e11-bfd0-d5e7d804177c\") " pod="openshift-marketplace/community-operators-ssrzt" Sep 30 12:25:00 crc kubenswrapper[5002]: I0930 12:25:00.338813 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-ssrzt"] Sep 30 12:25:00 crc kubenswrapper[5002]: I0930 12:25:00.439376 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8db93be7-82da-4e11-bfd0-d5e7d804177c-utilities\") pod \"community-operators-ssrzt\" (UID: \"8db93be7-82da-4e11-bfd0-d5e7d804177c\") " pod="openshift-marketplace/community-operators-ssrzt" Sep 30 12:25:00 crc kubenswrapper[5002]: I0930 12:25:00.439426 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6nv9c\" (UniqueName: \"kubernetes.io/projected/8db93be7-82da-4e11-bfd0-d5e7d804177c-kube-api-access-6nv9c\") pod \"community-operators-ssrzt\" (UID: \"8db93be7-82da-4e11-bfd0-d5e7d804177c\") " pod="openshift-marketplace/community-operators-ssrzt" Sep 30 12:25:00 crc kubenswrapper[5002]: I0930 12:25:00.439444 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8db93be7-82da-4e11-bfd0-d5e7d804177c-catalog-content\") pod \"community-operators-ssrzt\" (UID: \"8db93be7-82da-4e11-bfd0-d5e7d804177c\") " pod="openshift-marketplace/community-operators-ssrzt" Sep 30 12:25:00 crc kubenswrapper[5002]: I0930 12:25:00.442829 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8db93be7-82da-4e11-bfd0-d5e7d804177c-catalog-content\") pod \"community-operators-ssrzt\" (UID: \"8db93be7-82da-4e11-bfd0-d5e7d804177c\") " pod="openshift-marketplace/community-operators-ssrzt" Sep 30 12:25:00 crc kubenswrapper[5002]: I0930 12:25:00.442982 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8db93be7-82da-4e11-bfd0-d5e7d804177c-utilities\") pod \"community-operators-ssrzt\" (UID: 
\"8db93be7-82da-4e11-bfd0-d5e7d804177c\") " pod="openshift-marketplace/community-operators-ssrzt" Sep 30 12:25:00 crc kubenswrapper[5002]: I0930 12:25:00.451429 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-p4m6q" Sep 30 12:25:00 crc kubenswrapper[5002]: I0930 12:25:00.455816 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6nv9c\" (UniqueName: \"kubernetes.io/projected/8db93be7-82da-4e11-bfd0-d5e7d804177c-kube-api-access-6nv9c\") pod \"community-operators-ssrzt\" (UID: \"8db93be7-82da-4e11-bfd0-d5e7d804177c\") " pod="openshift-marketplace/community-operators-ssrzt" Sep 30 12:25:00 crc kubenswrapper[5002]: I0930 12:25:00.654818 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-ssrzt" Sep 30 12:25:00 crc kubenswrapper[5002]: I0930 12:25:00.673570 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-p4m6q"] Sep 30 12:25:00 crc kubenswrapper[5002]: I0930 12:25:00.838084 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kzj7j" event={"ID":"68797de5-40f1-448f-9fbb-fa3eb4adc842","Type":"ContainerStarted","Data":"ef00612a6982cd1be8427db0e5810459d32161ee22fb8c120d2d33d60e91882d"} Sep 30 12:25:00 crc kubenswrapper[5002]: I0930 12:25:00.839772 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-p4m6q" event={"ID":"122cb276-c38c-4c29-80f9-b9e225b0a5a6","Type":"ContainerStarted","Data":"3d59d7a604a1dcc26fe9d5cad82f791b62f4e51fcd34ea34453cf6d79a1dcfa2"} Sep 30 12:25:00 crc kubenswrapper[5002]: I0930 12:25:00.839812 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-p4m6q" event={"ID":"122cb276-c38c-4c29-80f9-b9e225b0a5a6","Type":"ContainerStarted","Data":"a969e4e6c6b3d19ac7f6f0b450196dc5a3a828a77382a0965d89aeec42b74b7e"} Sep 30 12:25:00 crc kubenswrapper[5002]: I0930 12:25:00.846378 5002 generic.go:334] "Generic (PLEG): container finished" podID="dc331b2c-354f-4cb2-9aab-2f8328781341" containerID="f756d0c216d0ad34b43f00d4d288d081fbe00e5a952c9151a9da49f2e6f77bec" exitCode=0 Sep 30 12:25:00 crc kubenswrapper[5002]: I0930 12:25:00.846430 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-twns5" event={"ID":"dc331b2c-354f-4cb2-9aab-2f8328781341","Type":"ContainerDied","Data":"f756d0c216d0ad34b43f00d4d288d081fbe00e5a952c9151a9da49f2e6f77bec"} Sep 30 12:25:00 crc kubenswrapper[5002]: I0930 12:25:00.855496 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-kzj7j" podStartSLOduration=2.295488023 podStartE2EDuration="3.855464211s" podCreationTimestamp="2025-09-30 12:24:57 +0000 UTC" firstStartedPulling="2025-09-30 12:24:58.814761113 +0000 UTC m=+273.064443259" lastFinishedPulling="2025-09-30 12:25:00.374737311 +0000 UTC m=+274.624419447" observedRunningTime="2025-09-30 12:25:00.855237885 +0000 UTC m=+275.104920031" watchObservedRunningTime="2025-09-30 12:25:00.855464211 +0000 UTC m=+275.105146357" Sep 30 12:25:00 crc kubenswrapper[5002]: I0930 12:25:00.869305 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-ssrzt"] Sep 30 12:25:00 crc kubenswrapper[5002]: W0930 12:25:00.906364 5002 manager.go:1169] Failed to process watch event {EventType:0 
Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod8db93be7_82da_4e11_bfd0_d5e7d804177c.slice/crio-5d84044748db94ad077e935d6cf4b6263fd129096ced8f88b958b1229ef2f172 WatchSource:0}: Error finding container 5d84044748db94ad077e935d6cf4b6263fd129096ced8f88b958b1229ef2f172: Status 404 returned error can't find the container with id 5d84044748db94ad077e935d6cf4b6263fd129096ced8f88b958b1229ef2f172 Sep 30 12:25:01 crc kubenswrapper[5002]: I0930 12:25:01.853290 5002 generic.go:334] "Generic (PLEG): container finished" podID="8db93be7-82da-4e11-bfd0-d5e7d804177c" containerID="42515d05952c343c9e7a2eb093f0e805bb018518648b57cbfab706553fce5dbd" exitCode=0 Sep 30 12:25:01 crc kubenswrapper[5002]: I0930 12:25:01.853492 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ssrzt" event={"ID":"8db93be7-82da-4e11-bfd0-d5e7d804177c","Type":"ContainerDied","Data":"42515d05952c343c9e7a2eb093f0e805bb018518648b57cbfab706553fce5dbd"} Sep 30 12:25:01 crc kubenswrapper[5002]: I0930 12:25:01.853573 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ssrzt" event={"ID":"8db93be7-82da-4e11-bfd0-d5e7d804177c","Type":"ContainerStarted","Data":"5d84044748db94ad077e935d6cf4b6263fd129096ced8f88b958b1229ef2f172"} Sep 30 12:25:01 crc kubenswrapper[5002]: I0930 12:25:01.855272 5002 generic.go:334] "Generic (PLEG): container finished" podID="122cb276-c38c-4c29-80f9-b9e225b0a5a6" containerID="3d59d7a604a1dcc26fe9d5cad82f791b62f4e51fcd34ea34453cf6d79a1dcfa2" exitCode=0 Sep 30 12:25:01 crc kubenswrapper[5002]: I0930 12:25:01.855316 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-p4m6q" event={"ID":"122cb276-c38c-4c29-80f9-b9e225b0a5a6","Type":"ContainerDied","Data":"3d59d7a604a1dcc26fe9d5cad82f791b62f4e51fcd34ea34453cf6d79a1dcfa2"} Sep 30 12:25:02 crc kubenswrapper[5002]: I0930 12:25:02.862362 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-twns5" event={"ID":"dc331b2c-354f-4cb2-9aab-2f8328781341","Type":"ContainerStarted","Data":"790accf6bd037c6c4833181c32304af15b6c44922ccfd7ba408f8d7bd4356303"} Sep 30 12:25:02 crc kubenswrapper[5002]: I0930 12:25:02.880914 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-twns5" podStartSLOduration=2.839025652 podStartE2EDuration="5.880861508s" podCreationTimestamp="2025-09-30 12:24:57 +0000 UTC" firstStartedPulling="2025-09-30 12:24:58.817984255 +0000 UTC m=+273.067666401" lastFinishedPulling="2025-09-30 12:25:01.859820111 +0000 UTC m=+276.109502257" observedRunningTime="2025-09-30 12:25:02.880685184 +0000 UTC m=+277.130367340" watchObservedRunningTime="2025-09-30 12:25:02.880861508 +0000 UTC m=+277.130543664" Sep 30 12:25:03 crc kubenswrapper[5002]: I0930 12:25:03.869854 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-p4m6q" event={"ID":"122cb276-c38c-4c29-80f9-b9e225b0a5a6","Type":"ContainerStarted","Data":"8286d88075c7fec67d729d0ee90cd5b7ad3fd49f3501ba6662bd783df016b42a"} Sep 30 12:25:03 crc kubenswrapper[5002]: I0930 12:25:03.874333 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ssrzt" event={"ID":"8db93be7-82da-4e11-bfd0-d5e7d804177c","Type":"ContainerStarted","Data":"3aefff65f606af8d90946231508d9f7fe66d6ef8a730a94b21c6daea216e8913"} Sep 30 12:25:04 crc kubenswrapper[5002]: I0930 12:25:04.881730 5002 
generic.go:334] "Generic (PLEG): container finished" podID="122cb276-c38c-4c29-80f9-b9e225b0a5a6" containerID="8286d88075c7fec67d729d0ee90cd5b7ad3fd49f3501ba6662bd783df016b42a" exitCode=0 Sep 30 12:25:04 crc kubenswrapper[5002]: I0930 12:25:04.881785 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-p4m6q" event={"ID":"122cb276-c38c-4c29-80f9-b9e225b0a5a6","Type":"ContainerDied","Data":"8286d88075c7fec67d729d0ee90cd5b7ad3fd49f3501ba6662bd783df016b42a"} Sep 30 12:25:05 crc kubenswrapper[5002]: I0930 12:25:04.885883 5002 generic.go:334] "Generic (PLEG): container finished" podID="8db93be7-82da-4e11-bfd0-d5e7d804177c" containerID="3aefff65f606af8d90946231508d9f7fe66d6ef8a730a94b21c6daea216e8913" exitCode=0 Sep 30 12:25:05 crc kubenswrapper[5002]: I0930 12:25:04.885926 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ssrzt" event={"ID":"8db93be7-82da-4e11-bfd0-d5e7d804177c","Type":"ContainerDied","Data":"3aefff65f606af8d90946231508d9f7fe66d6ef8a730a94b21c6daea216e8913"} Sep 30 12:25:05 crc kubenswrapper[5002]: I0930 12:25:05.894072 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-p4m6q" event={"ID":"122cb276-c38c-4c29-80f9-b9e225b0a5a6","Type":"ContainerStarted","Data":"5c544bf037e375d7789ae3cd840b9d98fda01826763762617d56abd9c3f9d4dc"} Sep 30 12:25:05 crc kubenswrapper[5002]: I0930 12:25:05.896869 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ssrzt" event={"ID":"8db93be7-82da-4e11-bfd0-d5e7d804177c","Type":"ContainerStarted","Data":"5817a8fdc38fe6e7086c8b844b364fbeec00e33c972bb3dc60a9a454afbc61ec"} Sep 30 12:25:05 crc kubenswrapper[5002]: I0930 12:25:05.913259 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-p4m6q" podStartSLOduration=2.289461764 podStartE2EDuration="5.913235982s" podCreationTimestamp="2025-09-30 12:25:00 +0000 UTC" firstStartedPulling="2025-09-30 12:25:01.85940251 +0000 UTC m=+276.109084666" lastFinishedPulling="2025-09-30 12:25:05.483176728 +0000 UTC m=+279.732858884" observedRunningTime="2025-09-30 12:25:05.910945763 +0000 UTC m=+280.160627919" watchObservedRunningTime="2025-09-30 12:25:05.913235982 +0000 UTC m=+280.162918148" Sep 30 12:25:05 crc kubenswrapper[5002]: I0930 12:25:05.932866 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-ssrzt" podStartSLOduration=2.138626871 podStartE2EDuration="5.932849114s" podCreationTimestamp="2025-09-30 12:25:00 +0000 UTC" firstStartedPulling="2025-09-30 12:25:01.859209735 +0000 UTC m=+276.108891881" lastFinishedPulling="2025-09-30 12:25:05.653431978 +0000 UTC m=+279.903114124" observedRunningTime="2025-09-30 12:25:05.930246948 +0000 UTC m=+280.179929094" watchObservedRunningTime="2025-09-30 12:25:05.932849114 +0000 UTC m=+280.182531260" Sep 30 12:25:08 crc kubenswrapper[5002]: I0930 12:25:08.058364 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-kzj7j" Sep 30 12:25:08 crc kubenswrapper[5002]: I0930 12:25:08.058912 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-kzj7j" Sep 30 12:25:08 crc kubenswrapper[5002]: I0930 12:25:08.125610 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" 
pod="openshift-marketplace/redhat-marketplace-kzj7j" Sep 30 12:25:08 crc kubenswrapper[5002]: I0930 12:25:08.309753 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-twns5" Sep 30 12:25:08 crc kubenswrapper[5002]: I0930 12:25:08.309816 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-twns5" Sep 30 12:25:08 crc kubenswrapper[5002]: I0930 12:25:08.369766 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-twns5" Sep 30 12:25:08 crc kubenswrapper[5002]: I0930 12:25:08.953582 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-twns5" Sep 30 12:25:08 crc kubenswrapper[5002]: I0930 12:25:08.957965 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-kzj7j" Sep 30 12:25:10 crc kubenswrapper[5002]: I0930 12:25:10.452656 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-p4m6q" Sep 30 12:25:10 crc kubenswrapper[5002]: I0930 12:25:10.452724 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-p4m6q" Sep 30 12:25:10 crc kubenswrapper[5002]: I0930 12:25:10.515899 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-p4m6q" Sep 30 12:25:10 crc kubenswrapper[5002]: I0930 12:25:10.656063 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-ssrzt" Sep 30 12:25:10 crc kubenswrapper[5002]: I0930 12:25:10.656108 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-ssrzt" Sep 30 12:25:10 crc kubenswrapper[5002]: I0930 12:25:10.693912 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-ssrzt" Sep 30 12:25:10 crc kubenswrapper[5002]: I0930 12:25:10.988953 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-ssrzt" Sep 30 12:25:10 crc kubenswrapper[5002]: I0930 12:25:10.989917 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-p4m6q" Sep 30 12:26:02 crc kubenswrapper[5002]: I0930 12:26:02.098939 5002 patch_prober.go:28] interesting pod/machine-config-daemon-ncbb5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 12:26:02 crc kubenswrapper[5002]: I0930 12:26:02.099570 5002 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" podUID="341a55c6-78d3-4fa2-8f47-b56fd41fa1c1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 12:26:32 crc kubenswrapper[5002]: I0930 12:26:32.098578 5002 patch_prober.go:28] interesting pod/machine-config-daemon-ncbb5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 
127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 12:26:32 crc kubenswrapper[5002]: I0930 12:26:32.099270 5002 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" podUID="341a55c6-78d3-4fa2-8f47-b56fd41fa1c1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 12:26:56 crc kubenswrapper[5002]: I0930 12:26:56.809779 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-85vdp"] Sep 30 12:26:56 crc kubenswrapper[5002]: I0930 12:26:56.811025 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-85vdp" Sep 30 12:26:56 crc kubenswrapper[5002]: I0930 12:26:56.825561 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-85vdp"] Sep 30 12:26:56 crc kubenswrapper[5002]: I0930 12:26:56.988116 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/71dbe003-4e32-484e-a13b-de49aea188a4-ca-trust-extracted\") pod \"image-registry-66df7c8f76-85vdp\" (UID: \"71dbe003-4e32-484e-a13b-de49aea188a4\") " pod="openshift-image-registry/image-registry-66df7c8f76-85vdp" Sep 30 12:26:56 crc kubenswrapper[5002]: I0930 12:26:56.988162 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/71dbe003-4e32-484e-a13b-de49aea188a4-registry-tls\") pod \"image-registry-66df7c8f76-85vdp\" (UID: \"71dbe003-4e32-484e-a13b-de49aea188a4\") " pod="openshift-image-registry/image-registry-66df7c8f76-85vdp" Sep 30 12:26:56 crc kubenswrapper[5002]: I0930 12:26:56.988180 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/71dbe003-4e32-484e-a13b-de49aea188a4-registry-certificates\") pod \"image-registry-66df7c8f76-85vdp\" (UID: \"71dbe003-4e32-484e-a13b-de49aea188a4\") " pod="openshift-image-registry/image-registry-66df7c8f76-85vdp" Sep 30 12:26:56 crc kubenswrapper[5002]: I0930 12:26:56.988204 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/71dbe003-4e32-484e-a13b-de49aea188a4-trusted-ca\") pod \"image-registry-66df7c8f76-85vdp\" (UID: \"71dbe003-4e32-484e-a13b-de49aea188a4\") " pod="openshift-image-registry/image-registry-66df7c8f76-85vdp" Sep 30 12:26:56 crc kubenswrapper[5002]: I0930 12:26:56.988240 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ldrrc\" (UniqueName: \"kubernetes.io/projected/71dbe003-4e32-484e-a13b-de49aea188a4-kube-api-access-ldrrc\") pod \"image-registry-66df7c8f76-85vdp\" (UID: \"71dbe003-4e32-484e-a13b-de49aea188a4\") " pod="openshift-image-registry/image-registry-66df7c8f76-85vdp" Sep 30 12:26:56 crc kubenswrapper[5002]: I0930 12:26:56.988258 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/71dbe003-4e32-484e-a13b-de49aea188a4-bound-sa-token\") pod \"image-registry-66df7c8f76-85vdp\" (UID: \"71dbe003-4e32-484e-a13b-de49aea188a4\") " 
pod="openshift-image-registry/image-registry-66df7c8f76-85vdp" Sep 30 12:26:56 crc kubenswrapper[5002]: I0930 12:26:56.988292 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-85vdp\" (UID: \"71dbe003-4e32-484e-a13b-de49aea188a4\") " pod="openshift-image-registry/image-registry-66df7c8f76-85vdp" Sep 30 12:26:56 crc kubenswrapper[5002]: I0930 12:26:56.988313 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/71dbe003-4e32-484e-a13b-de49aea188a4-installation-pull-secrets\") pod \"image-registry-66df7c8f76-85vdp\" (UID: \"71dbe003-4e32-484e-a13b-de49aea188a4\") " pod="openshift-image-registry/image-registry-66df7c8f76-85vdp" Sep 30 12:26:57 crc kubenswrapper[5002]: I0930 12:26:57.032029 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-85vdp\" (UID: \"71dbe003-4e32-484e-a13b-de49aea188a4\") " pod="openshift-image-registry/image-registry-66df7c8f76-85vdp" Sep 30 12:26:57 crc kubenswrapper[5002]: I0930 12:26:57.089533 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/71dbe003-4e32-484e-a13b-de49aea188a4-bound-sa-token\") pod \"image-registry-66df7c8f76-85vdp\" (UID: \"71dbe003-4e32-484e-a13b-de49aea188a4\") " pod="openshift-image-registry/image-registry-66df7c8f76-85vdp" Sep 30 12:26:57 crc kubenswrapper[5002]: I0930 12:26:57.089649 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/71dbe003-4e32-484e-a13b-de49aea188a4-installation-pull-secrets\") pod \"image-registry-66df7c8f76-85vdp\" (UID: \"71dbe003-4e32-484e-a13b-de49aea188a4\") " pod="openshift-image-registry/image-registry-66df7c8f76-85vdp" Sep 30 12:26:57 crc kubenswrapper[5002]: I0930 12:26:57.089721 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/71dbe003-4e32-484e-a13b-de49aea188a4-ca-trust-extracted\") pod \"image-registry-66df7c8f76-85vdp\" (UID: \"71dbe003-4e32-484e-a13b-de49aea188a4\") " pod="openshift-image-registry/image-registry-66df7c8f76-85vdp" Sep 30 12:26:57 crc kubenswrapper[5002]: I0930 12:26:57.090526 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/71dbe003-4e32-484e-a13b-de49aea188a4-ca-trust-extracted\") pod \"image-registry-66df7c8f76-85vdp\" (UID: \"71dbe003-4e32-484e-a13b-de49aea188a4\") " pod="openshift-image-registry/image-registry-66df7c8f76-85vdp" Sep 30 12:26:57 crc kubenswrapper[5002]: I0930 12:26:57.090653 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/71dbe003-4e32-484e-a13b-de49aea188a4-registry-tls\") pod \"image-registry-66df7c8f76-85vdp\" (UID: \"71dbe003-4e32-484e-a13b-de49aea188a4\") " pod="openshift-image-registry/image-registry-66df7c8f76-85vdp" Sep 30 12:26:57 crc kubenswrapper[5002]: I0930 12:26:57.090761 5002 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/71dbe003-4e32-484e-a13b-de49aea188a4-registry-certificates\") pod \"image-registry-66df7c8f76-85vdp\" (UID: \"71dbe003-4e32-484e-a13b-de49aea188a4\") " pod="openshift-image-registry/image-registry-66df7c8f76-85vdp" Sep 30 12:26:57 crc kubenswrapper[5002]: I0930 12:26:57.090895 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/71dbe003-4e32-484e-a13b-de49aea188a4-trusted-ca\") pod \"image-registry-66df7c8f76-85vdp\" (UID: \"71dbe003-4e32-484e-a13b-de49aea188a4\") " pod="openshift-image-registry/image-registry-66df7c8f76-85vdp" Sep 30 12:26:57 crc kubenswrapper[5002]: I0930 12:26:57.091069 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ldrrc\" (UniqueName: \"kubernetes.io/projected/71dbe003-4e32-484e-a13b-de49aea188a4-kube-api-access-ldrrc\") pod \"image-registry-66df7c8f76-85vdp\" (UID: \"71dbe003-4e32-484e-a13b-de49aea188a4\") " pod="openshift-image-registry/image-registry-66df7c8f76-85vdp" Sep 30 12:26:57 crc kubenswrapper[5002]: I0930 12:26:57.091968 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/71dbe003-4e32-484e-a13b-de49aea188a4-trusted-ca\") pod \"image-registry-66df7c8f76-85vdp\" (UID: \"71dbe003-4e32-484e-a13b-de49aea188a4\") " pod="openshift-image-registry/image-registry-66df7c8f76-85vdp" Sep 30 12:26:57 crc kubenswrapper[5002]: I0930 12:26:57.092117 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/71dbe003-4e32-484e-a13b-de49aea188a4-registry-certificates\") pod \"image-registry-66df7c8f76-85vdp\" (UID: \"71dbe003-4e32-484e-a13b-de49aea188a4\") " pod="openshift-image-registry/image-registry-66df7c8f76-85vdp" Sep 30 12:26:57 crc kubenswrapper[5002]: I0930 12:26:57.096141 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/71dbe003-4e32-484e-a13b-de49aea188a4-installation-pull-secrets\") pod \"image-registry-66df7c8f76-85vdp\" (UID: \"71dbe003-4e32-484e-a13b-de49aea188a4\") " pod="openshift-image-registry/image-registry-66df7c8f76-85vdp" Sep 30 12:26:57 crc kubenswrapper[5002]: I0930 12:26:57.102243 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/71dbe003-4e32-484e-a13b-de49aea188a4-registry-tls\") pod \"image-registry-66df7c8f76-85vdp\" (UID: \"71dbe003-4e32-484e-a13b-de49aea188a4\") " pod="openshift-image-registry/image-registry-66df7c8f76-85vdp" Sep 30 12:26:57 crc kubenswrapper[5002]: I0930 12:26:57.106267 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/71dbe003-4e32-484e-a13b-de49aea188a4-bound-sa-token\") pod \"image-registry-66df7c8f76-85vdp\" (UID: \"71dbe003-4e32-484e-a13b-de49aea188a4\") " pod="openshift-image-registry/image-registry-66df7c8f76-85vdp" Sep 30 12:26:57 crc kubenswrapper[5002]: I0930 12:26:57.108634 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ldrrc\" (UniqueName: \"kubernetes.io/projected/71dbe003-4e32-484e-a13b-de49aea188a4-kube-api-access-ldrrc\") pod \"image-registry-66df7c8f76-85vdp\" (UID: 
\"71dbe003-4e32-484e-a13b-de49aea188a4\") " pod="openshift-image-registry/image-registry-66df7c8f76-85vdp" Sep 30 12:26:57 crc kubenswrapper[5002]: I0930 12:26:57.129267 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-85vdp" Sep 30 12:26:57 crc kubenswrapper[5002]: I0930 12:26:57.312534 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-85vdp"] Sep 30 12:26:57 crc kubenswrapper[5002]: I0930 12:26:57.594824 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-85vdp" event={"ID":"71dbe003-4e32-484e-a13b-de49aea188a4","Type":"ContainerStarted","Data":"5870c7c4ac24b4786a5cdc72b4ba386789ad5a539a5575f970770feee4519f53"} Sep 30 12:26:57 crc kubenswrapper[5002]: I0930 12:26:57.594867 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-85vdp" event={"ID":"71dbe003-4e32-484e-a13b-de49aea188a4","Type":"ContainerStarted","Data":"5989e79ab871b183f16bd4157e1e8588d87a0f802aae2dc9f244fb034ea55e79"} Sep 30 12:26:57 crc kubenswrapper[5002]: I0930 12:26:57.594999 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-66df7c8f76-85vdp" Sep 30 12:26:57 crc kubenswrapper[5002]: I0930 12:26:57.621211 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-66df7c8f76-85vdp" podStartSLOduration=1.621190438 podStartE2EDuration="1.621190438s" podCreationTimestamp="2025-09-30 12:26:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 12:26:57.618391308 +0000 UTC m=+391.868073494" watchObservedRunningTime="2025-09-30 12:26:57.621190438 +0000 UTC m=+391.870872584" Sep 30 12:27:02 crc kubenswrapper[5002]: I0930 12:27:02.098809 5002 patch_prober.go:28] interesting pod/machine-config-daemon-ncbb5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 12:27:02 crc kubenswrapper[5002]: I0930 12:27:02.099397 5002 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" podUID="341a55c6-78d3-4fa2-8f47-b56fd41fa1c1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 12:27:02 crc kubenswrapper[5002]: I0930 12:27:02.099460 5002 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" Sep 30 12:27:02 crc kubenswrapper[5002]: I0930 12:27:02.100378 5002 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"a6a0c92d3cdc4d9bfc4039ca649c102e891457b2cdc698fa8e2c73771783ffd2"} pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 30 12:27:02 crc kubenswrapper[5002]: I0930 12:27:02.100508 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" 
podUID="341a55c6-78d3-4fa2-8f47-b56fd41fa1c1" containerName="machine-config-daemon" containerID="cri-o://a6a0c92d3cdc4d9bfc4039ca649c102e891457b2cdc698fa8e2c73771783ffd2" gracePeriod=600 Sep 30 12:27:02 crc kubenswrapper[5002]: I0930 12:27:02.630821 5002 generic.go:334] "Generic (PLEG): container finished" podID="341a55c6-78d3-4fa2-8f47-b56fd41fa1c1" containerID="a6a0c92d3cdc4d9bfc4039ca649c102e891457b2cdc698fa8e2c73771783ffd2" exitCode=0 Sep 30 12:27:02 crc kubenswrapper[5002]: I0930 12:27:02.631028 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" event={"ID":"341a55c6-78d3-4fa2-8f47-b56fd41fa1c1","Type":"ContainerDied","Data":"a6a0c92d3cdc4d9bfc4039ca649c102e891457b2cdc698fa8e2c73771783ffd2"} Sep 30 12:27:02 crc kubenswrapper[5002]: I0930 12:27:02.631238 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" event={"ID":"341a55c6-78d3-4fa2-8f47-b56fd41fa1c1","Type":"ContainerStarted","Data":"2265b6a0e732448a595146d842013f7245c5b783b761a0762decdaca959b8f6c"} Sep 30 12:27:02 crc kubenswrapper[5002]: I0930 12:27:02.631275 5002 scope.go:117] "RemoveContainer" containerID="6868404a7219f8dbbc4e7541e5f80402c169808daea81b5f94546472cb8e70a6" Sep 30 12:27:17 crc kubenswrapper[5002]: I0930 12:27:17.137172 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-66df7c8f76-85vdp" Sep 30 12:27:17 crc kubenswrapper[5002]: I0930 12:27:17.210338 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-zfcl9"] Sep 30 12:27:42 crc kubenswrapper[5002]: I0930 12:27:42.264109 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-image-registry/image-registry-697d97f7c8-zfcl9" podUID="048223bb-4ff9-41e0-9b78-457a720ff399" containerName="registry" containerID="cri-o://d762565a981249cbcc25e00d9edcfde22446050681650e2fd5ba15e6325bf730" gracePeriod=30 Sep 30 12:27:42 crc kubenswrapper[5002]: I0930 12:27:42.656200 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-zfcl9" Sep 30 12:27:42 crc kubenswrapper[5002]: I0930 12:27:42.784217 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-storage\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"048223bb-4ff9-41e0-9b78-457a720ff399\" (UID: \"048223bb-4ff9-41e0-9b78-457a720ff399\") " Sep 30 12:27:42 crc kubenswrapper[5002]: I0930 12:27:42.784261 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vgd9l\" (UniqueName: \"kubernetes.io/projected/048223bb-4ff9-41e0-9b78-457a720ff399-kube-api-access-vgd9l\") pod \"048223bb-4ff9-41e0-9b78-457a720ff399\" (UID: \"048223bb-4ff9-41e0-9b78-457a720ff399\") " Sep 30 12:27:42 crc kubenswrapper[5002]: I0930 12:27:42.784304 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/048223bb-4ff9-41e0-9b78-457a720ff399-bound-sa-token\") pod \"048223bb-4ff9-41e0-9b78-457a720ff399\" (UID: \"048223bb-4ff9-41e0-9b78-457a720ff399\") " Sep 30 12:27:42 crc kubenswrapper[5002]: I0930 12:27:42.784330 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/048223bb-4ff9-41e0-9b78-457a720ff399-registry-certificates\") pod \"048223bb-4ff9-41e0-9b78-457a720ff399\" (UID: \"048223bb-4ff9-41e0-9b78-457a720ff399\") " Sep 30 12:27:42 crc kubenswrapper[5002]: I0930 12:27:42.784355 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/048223bb-4ff9-41e0-9b78-457a720ff399-trusted-ca\") pod \"048223bb-4ff9-41e0-9b78-457a720ff399\" (UID: \"048223bb-4ff9-41e0-9b78-457a720ff399\") " Sep 30 12:27:42 crc kubenswrapper[5002]: I0930 12:27:42.784417 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/048223bb-4ff9-41e0-9b78-457a720ff399-ca-trust-extracted\") pod \"048223bb-4ff9-41e0-9b78-457a720ff399\" (UID: \"048223bb-4ff9-41e0-9b78-457a720ff399\") " Sep 30 12:27:42 crc kubenswrapper[5002]: I0930 12:27:42.784438 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/048223bb-4ff9-41e0-9b78-457a720ff399-registry-tls\") pod \"048223bb-4ff9-41e0-9b78-457a720ff399\" (UID: \"048223bb-4ff9-41e0-9b78-457a720ff399\") " Sep 30 12:27:42 crc kubenswrapper[5002]: I0930 12:27:42.784464 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/048223bb-4ff9-41e0-9b78-457a720ff399-installation-pull-secrets\") pod \"048223bb-4ff9-41e0-9b78-457a720ff399\" (UID: \"048223bb-4ff9-41e0-9b78-457a720ff399\") " Sep 30 12:27:42 crc kubenswrapper[5002]: I0930 12:27:42.789552 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/048223bb-4ff9-41e0-9b78-457a720ff399-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "048223bb-4ff9-41e0-9b78-457a720ff399" (UID: "048223bb-4ff9-41e0-9b78-457a720ff399"). InnerVolumeSpecName "registry-certificates". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 12:27:42 crc kubenswrapper[5002]: I0930 12:27:42.789611 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/048223bb-4ff9-41e0-9b78-457a720ff399-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "048223bb-4ff9-41e0-9b78-457a720ff399" (UID: "048223bb-4ff9-41e0-9b78-457a720ff399"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 12:27:42 crc kubenswrapper[5002]: I0930 12:27:42.793078 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/048223bb-4ff9-41e0-9b78-457a720ff399-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "048223bb-4ff9-41e0-9b78-457a720ff399" (UID: "048223bb-4ff9-41e0-9b78-457a720ff399"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 12:27:42 crc kubenswrapper[5002]: I0930 12:27:42.793612 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/048223bb-4ff9-41e0-9b78-457a720ff399-kube-api-access-vgd9l" (OuterVolumeSpecName: "kube-api-access-vgd9l") pod "048223bb-4ff9-41e0-9b78-457a720ff399" (UID: "048223bb-4ff9-41e0-9b78-457a720ff399"). InnerVolumeSpecName "kube-api-access-vgd9l". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 12:27:42 crc kubenswrapper[5002]: I0930 12:27:42.793724 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/048223bb-4ff9-41e0-9b78-457a720ff399-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "048223bb-4ff9-41e0-9b78-457a720ff399" (UID: "048223bb-4ff9-41e0-9b78-457a720ff399"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 12:27:42 crc kubenswrapper[5002]: I0930 12:27:42.796615 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/048223bb-4ff9-41e0-9b78-457a720ff399-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "048223bb-4ff9-41e0-9b78-457a720ff399" (UID: "048223bb-4ff9-41e0-9b78-457a720ff399"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:27:42 crc kubenswrapper[5002]: I0930 12:27:42.804836 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "registry-storage") pod "048223bb-4ff9-41e0-9b78-457a720ff399" (UID: "048223bb-4ff9-41e0-9b78-457a720ff399"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". PluginName "kubernetes.io/csi", VolumeGidValue "" Sep 30 12:27:42 crc kubenswrapper[5002]: I0930 12:27:42.813528 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/048223bb-4ff9-41e0-9b78-457a720ff399-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "048223bb-4ff9-41e0-9b78-457a720ff399" (UID: "048223bb-4ff9-41e0-9b78-457a720ff399"). InnerVolumeSpecName "ca-trust-extracted". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 12:27:42 crc kubenswrapper[5002]: I0930 12:27:42.886758 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vgd9l\" (UniqueName: \"kubernetes.io/projected/048223bb-4ff9-41e0-9b78-457a720ff399-kube-api-access-vgd9l\") on node \"crc\" DevicePath \"\"" Sep 30 12:27:42 crc kubenswrapper[5002]: I0930 12:27:42.886800 5002 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/048223bb-4ff9-41e0-9b78-457a720ff399-bound-sa-token\") on node \"crc\" DevicePath \"\"" Sep 30 12:27:42 crc kubenswrapper[5002]: I0930 12:27:42.886842 5002 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/048223bb-4ff9-41e0-9b78-457a720ff399-registry-certificates\") on node \"crc\" DevicePath \"\"" Sep 30 12:27:42 crc kubenswrapper[5002]: I0930 12:27:42.886859 5002 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/048223bb-4ff9-41e0-9b78-457a720ff399-trusted-ca\") on node \"crc\" DevicePath \"\"" Sep 30 12:27:42 crc kubenswrapper[5002]: I0930 12:27:42.886870 5002 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/048223bb-4ff9-41e0-9b78-457a720ff399-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Sep 30 12:27:42 crc kubenswrapper[5002]: I0930 12:27:42.886880 5002 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/048223bb-4ff9-41e0-9b78-457a720ff399-registry-tls\") on node \"crc\" DevicePath \"\"" Sep 30 12:27:42 crc kubenswrapper[5002]: I0930 12:27:42.886912 5002 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/048223bb-4ff9-41e0-9b78-457a720ff399-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Sep 30 12:27:42 crc kubenswrapper[5002]: I0930 12:27:42.893829 5002 generic.go:334] "Generic (PLEG): container finished" podID="048223bb-4ff9-41e0-9b78-457a720ff399" containerID="d762565a981249cbcc25e00d9edcfde22446050681650e2fd5ba15e6325bf730" exitCode=0 Sep 30 12:27:42 crc kubenswrapper[5002]: I0930 12:27:42.893923 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-zfcl9" Sep 30 12:27:42 crc kubenswrapper[5002]: I0930 12:27:42.893883 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-zfcl9" event={"ID":"048223bb-4ff9-41e0-9b78-457a720ff399","Type":"ContainerDied","Data":"d762565a981249cbcc25e00d9edcfde22446050681650e2fd5ba15e6325bf730"} Sep 30 12:27:42 crc kubenswrapper[5002]: I0930 12:27:42.894093 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-zfcl9" event={"ID":"048223bb-4ff9-41e0-9b78-457a720ff399","Type":"ContainerDied","Data":"2789dab23b8c0aa1c2118197e37824969da42811b379310113f6b92c487cac48"} Sep 30 12:27:42 crc kubenswrapper[5002]: I0930 12:27:42.894147 5002 scope.go:117] "RemoveContainer" containerID="d762565a981249cbcc25e00d9edcfde22446050681650e2fd5ba15e6325bf730" Sep 30 12:27:42 crc kubenswrapper[5002]: I0930 12:27:42.916620 5002 scope.go:117] "RemoveContainer" containerID="d762565a981249cbcc25e00d9edcfde22446050681650e2fd5ba15e6325bf730" Sep 30 12:27:42 crc kubenswrapper[5002]: E0930 12:27:42.917709 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d762565a981249cbcc25e00d9edcfde22446050681650e2fd5ba15e6325bf730\": container with ID starting with d762565a981249cbcc25e00d9edcfde22446050681650e2fd5ba15e6325bf730 not found: ID does not exist" containerID="d762565a981249cbcc25e00d9edcfde22446050681650e2fd5ba15e6325bf730" Sep 30 12:27:42 crc kubenswrapper[5002]: I0930 12:27:42.917790 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d762565a981249cbcc25e00d9edcfde22446050681650e2fd5ba15e6325bf730"} err="failed to get container status \"d762565a981249cbcc25e00d9edcfde22446050681650e2fd5ba15e6325bf730\": rpc error: code = NotFound desc = could not find container \"d762565a981249cbcc25e00d9edcfde22446050681650e2fd5ba15e6325bf730\": container with ID starting with d762565a981249cbcc25e00d9edcfde22446050681650e2fd5ba15e6325bf730 not found: ID does not exist" Sep 30 12:27:42 crc kubenswrapper[5002]: I0930 12:27:42.930965 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-zfcl9"] Sep 30 12:27:42 crc kubenswrapper[5002]: I0930 12:27:42.940799 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-zfcl9"] Sep 30 12:27:44 crc kubenswrapper[5002]: I0930 12:27:44.685267 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="048223bb-4ff9-41e0-9b78-457a720ff399" path="/var/lib/kubelet/pods/048223bb-4ff9-41e0-9b78-457a720ff399/volumes" Sep 30 12:29:02 crc kubenswrapper[5002]: I0930 12:29:02.098196 5002 patch_prober.go:28] interesting pod/machine-config-daemon-ncbb5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 12:29:02 crc kubenswrapper[5002]: I0930 12:29:02.098806 5002 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" podUID="341a55c6-78d3-4fa2-8f47-b56fd41fa1c1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 12:29:32 crc 
kubenswrapper[5002]: I0930 12:29:32.099245 5002 patch_prober.go:28] interesting pod/machine-config-daemon-ncbb5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 12:29:32 crc kubenswrapper[5002]: I0930 12:29:32.099947 5002 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" podUID="341a55c6-78d3-4fa2-8f47-b56fd41fa1c1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 12:30:00 crc kubenswrapper[5002]: I0930 12:30:00.146934 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29320590-5z254"] Sep 30 12:30:00 crc kubenswrapper[5002]: E0930 12:30:00.148364 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="048223bb-4ff9-41e0-9b78-457a720ff399" containerName="registry" Sep 30 12:30:00 crc kubenswrapper[5002]: I0930 12:30:00.148382 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="048223bb-4ff9-41e0-9b78-457a720ff399" containerName="registry" Sep 30 12:30:00 crc kubenswrapper[5002]: I0930 12:30:00.148623 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="048223bb-4ff9-41e0-9b78-457a720ff399" containerName="registry" Sep 30 12:30:00 crc kubenswrapper[5002]: I0930 12:30:00.149178 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29320590-5z254" Sep 30 12:30:00 crc kubenswrapper[5002]: I0930 12:30:00.155319 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Sep 30 12:30:00 crc kubenswrapper[5002]: I0930 12:30:00.155572 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Sep 30 12:30:00 crc kubenswrapper[5002]: I0930 12:30:00.158062 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29320590-5z254"] Sep 30 12:30:00 crc kubenswrapper[5002]: I0930 12:30:00.194486 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8gndj\" (UniqueName: \"kubernetes.io/projected/e2e6d49f-130e-4161-9ee5-72ad4a0bc56c-kube-api-access-8gndj\") pod \"collect-profiles-29320590-5z254\" (UID: \"e2e6d49f-130e-4161-9ee5-72ad4a0bc56c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29320590-5z254" Sep 30 12:30:00 crc kubenswrapper[5002]: I0930 12:30:00.194557 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/e2e6d49f-130e-4161-9ee5-72ad4a0bc56c-secret-volume\") pod \"collect-profiles-29320590-5z254\" (UID: \"e2e6d49f-130e-4161-9ee5-72ad4a0bc56c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29320590-5z254" Sep 30 12:30:00 crc kubenswrapper[5002]: I0930 12:30:00.194591 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/e2e6d49f-130e-4161-9ee5-72ad4a0bc56c-config-volume\") pod \"collect-profiles-29320590-5z254\" (UID: 
\"e2e6d49f-130e-4161-9ee5-72ad4a0bc56c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29320590-5z254" Sep 30 12:30:00 crc kubenswrapper[5002]: I0930 12:30:00.295874 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/e2e6d49f-130e-4161-9ee5-72ad4a0bc56c-secret-volume\") pod \"collect-profiles-29320590-5z254\" (UID: \"e2e6d49f-130e-4161-9ee5-72ad4a0bc56c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29320590-5z254" Sep 30 12:30:00 crc kubenswrapper[5002]: I0930 12:30:00.296388 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/e2e6d49f-130e-4161-9ee5-72ad4a0bc56c-config-volume\") pod \"collect-profiles-29320590-5z254\" (UID: \"e2e6d49f-130e-4161-9ee5-72ad4a0bc56c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29320590-5z254" Sep 30 12:30:00 crc kubenswrapper[5002]: I0930 12:30:00.296531 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8gndj\" (UniqueName: \"kubernetes.io/projected/e2e6d49f-130e-4161-9ee5-72ad4a0bc56c-kube-api-access-8gndj\") pod \"collect-profiles-29320590-5z254\" (UID: \"e2e6d49f-130e-4161-9ee5-72ad4a0bc56c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29320590-5z254" Sep 30 12:30:00 crc kubenswrapper[5002]: I0930 12:30:00.297691 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/e2e6d49f-130e-4161-9ee5-72ad4a0bc56c-config-volume\") pod \"collect-profiles-29320590-5z254\" (UID: \"e2e6d49f-130e-4161-9ee5-72ad4a0bc56c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29320590-5z254" Sep 30 12:30:00 crc kubenswrapper[5002]: I0930 12:30:00.301728 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/e2e6d49f-130e-4161-9ee5-72ad4a0bc56c-secret-volume\") pod \"collect-profiles-29320590-5z254\" (UID: \"e2e6d49f-130e-4161-9ee5-72ad4a0bc56c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29320590-5z254" Sep 30 12:30:00 crc kubenswrapper[5002]: I0930 12:30:00.318634 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8gndj\" (UniqueName: \"kubernetes.io/projected/e2e6d49f-130e-4161-9ee5-72ad4a0bc56c-kube-api-access-8gndj\") pod \"collect-profiles-29320590-5z254\" (UID: \"e2e6d49f-130e-4161-9ee5-72ad4a0bc56c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29320590-5z254" Sep 30 12:30:00 crc kubenswrapper[5002]: I0930 12:30:00.476798 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29320590-5z254" Sep 30 12:30:00 crc kubenswrapper[5002]: I0930 12:30:00.674571 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29320590-5z254"] Sep 30 12:30:00 crc kubenswrapper[5002]: I0930 12:30:00.751226 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29320590-5z254" event={"ID":"e2e6d49f-130e-4161-9ee5-72ad4a0bc56c","Type":"ContainerStarted","Data":"00b629ed0cd3e930a9aafcead5d4cfcd80ec03b5e4a926f644872961f22e1d72"} Sep 30 12:30:01 crc kubenswrapper[5002]: I0930 12:30:01.760172 5002 generic.go:334] "Generic (PLEG): container finished" podID="e2e6d49f-130e-4161-9ee5-72ad4a0bc56c" containerID="b8e2fa48c2bd78b2959ae19ad926ca8b3fda7ab4ae4fc6033ae2f3eab06dfdeb" exitCode=0 Sep 30 12:30:01 crc kubenswrapper[5002]: I0930 12:30:01.760277 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29320590-5z254" event={"ID":"e2e6d49f-130e-4161-9ee5-72ad4a0bc56c","Type":"ContainerDied","Data":"b8e2fa48c2bd78b2959ae19ad926ca8b3fda7ab4ae4fc6033ae2f3eab06dfdeb"} Sep 30 12:30:02 crc kubenswrapper[5002]: I0930 12:30:02.098089 5002 patch_prober.go:28] interesting pod/machine-config-daemon-ncbb5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 12:30:02 crc kubenswrapper[5002]: I0930 12:30:02.098171 5002 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" podUID="341a55c6-78d3-4fa2-8f47-b56fd41fa1c1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 12:30:02 crc kubenswrapper[5002]: I0930 12:30:02.098249 5002 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" Sep 30 12:30:02 crc kubenswrapper[5002]: I0930 12:30:02.099136 5002 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"2265b6a0e732448a595146d842013f7245c5b783b761a0762decdaca959b8f6c"} pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 30 12:30:02 crc kubenswrapper[5002]: I0930 12:30:02.099243 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" podUID="341a55c6-78d3-4fa2-8f47-b56fd41fa1c1" containerName="machine-config-daemon" containerID="cri-o://2265b6a0e732448a595146d842013f7245c5b783b761a0762decdaca959b8f6c" gracePeriod=600 Sep 30 12:30:02 crc kubenswrapper[5002]: I0930 12:30:02.766215 5002 generic.go:334] "Generic (PLEG): container finished" podID="341a55c6-78d3-4fa2-8f47-b56fd41fa1c1" containerID="2265b6a0e732448a595146d842013f7245c5b783b761a0762decdaca959b8f6c" exitCode=0 Sep 30 12:30:02 crc kubenswrapper[5002]: I0930 12:30:02.766287 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" 
event={"ID":"341a55c6-78d3-4fa2-8f47-b56fd41fa1c1","Type":"ContainerDied","Data":"2265b6a0e732448a595146d842013f7245c5b783b761a0762decdaca959b8f6c"} Sep 30 12:30:02 crc kubenswrapper[5002]: I0930 12:30:02.766563 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" event={"ID":"341a55c6-78d3-4fa2-8f47-b56fd41fa1c1","Type":"ContainerStarted","Data":"192b85eb900ab298ef92f87ae6539f9cf20c972d71035d71486bd4ab14bc6108"} Sep 30 12:30:02 crc kubenswrapper[5002]: I0930 12:30:02.766581 5002 scope.go:117] "RemoveContainer" containerID="a6a0c92d3cdc4d9bfc4039ca649c102e891457b2cdc698fa8e2c73771783ffd2" Sep 30 12:30:02 crc kubenswrapper[5002]: I0930 12:30:02.970947 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29320590-5z254" Sep 30 12:30:03 crc kubenswrapper[5002]: I0930 12:30:03.033796 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/e2e6d49f-130e-4161-9ee5-72ad4a0bc56c-secret-volume\") pod \"e2e6d49f-130e-4161-9ee5-72ad4a0bc56c\" (UID: \"e2e6d49f-130e-4161-9ee5-72ad4a0bc56c\") " Sep 30 12:30:03 crc kubenswrapper[5002]: I0930 12:30:03.033839 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/e2e6d49f-130e-4161-9ee5-72ad4a0bc56c-config-volume\") pod \"e2e6d49f-130e-4161-9ee5-72ad4a0bc56c\" (UID: \"e2e6d49f-130e-4161-9ee5-72ad4a0bc56c\") " Sep 30 12:30:03 crc kubenswrapper[5002]: I0930 12:30:03.033865 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8gndj\" (UniqueName: \"kubernetes.io/projected/e2e6d49f-130e-4161-9ee5-72ad4a0bc56c-kube-api-access-8gndj\") pod \"e2e6d49f-130e-4161-9ee5-72ad4a0bc56c\" (UID: \"e2e6d49f-130e-4161-9ee5-72ad4a0bc56c\") " Sep 30 12:30:03 crc kubenswrapper[5002]: I0930 12:30:03.034648 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e2e6d49f-130e-4161-9ee5-72ad4a0bc56c-config-volume" (OuterVolumeSpecName: "config-volume") pod "e2e6d49f-130e-4161-9ee5-72ad4a0bc56c" (UID: "e2e6d49f-130e-4161-9ee5-72ad4a0bc56c"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 12:30:03 crc kubenswrapper[5002]: I0930 12:30:03.038737 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e2e6d49f-130e-4161-9ee5-72ad4a0bc56c-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "e2e6d49f-130e-4161-9ee5-72ad4a0bc56c" (UID: "e2e6d49f-130e-4161-9ee5-72ad4a0bc56c"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:30:03 crc kubenswrapper[5002]: I0930 12:30:03.039066 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e2e6d49f-130e-4161-9ee5-72ad4a0bc56c-kube-api-access-8gndj" (OuterVolumeSpecName: "kube-api-access-8gndj") pod "e2e6d49f-130e-4161-9ee5-72ad4a0bc56c" (UID: "e2e6d49f-130e-4161-9ee5-72ad4a0bc56c"). InnerVolumeSpecName "kube-api-access-8gndj". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 12:30:03 crc kubenswrapper[5002]: I0930 12:30:03.135640 5002 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/e2e6d49f-130e-4161-9ee5-72ad4a0bc56c-secret-volume\") on node \"crc\" DevicePath \"\"" Sep 30 12:30:03 crc kubenswrapper[5002]: I0930 12:30:03.135692 5002 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/e2e6d49f-130e-4161-9ee5-72ad4a0bc56c-config-volume\") on node \"crc\" DevicePath \"\"" Sep 30 12:30:03 crc kubenswrapper[5002]: I0930 12:30:03.135715 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8gndj\" (UniqueName: \"kubernetes.io/projected/e2e6d49f-130e-4161-9ee5-72ad4a0bc56c-kube-api-access-8gndj\") on node \"crc\" DevicePath \"\"" Sep 30 12:30:03 crc kubenswrapper[5002]: I0930 12:30:03.776413 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29320590-5z254" event={"ID":"e2e6d49f-130e-4161-9ee5-72ad4a0bc56c","Type":"ContainerDied","Data":"00b629ed0cd3e930a9aafcead5d4cfcd80ec03b5e4a926f644872961f22e1d72"} Sep 30 12:30:03 crc kubenswrapper[5002]: I0930 12:30:03.776517 5002 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="00b629ed0cd3e930a9aafcead5d4cfcd80ec03b5e4a926f644872961f22e1d72" Sep 30 12:30:03 crc kubenswrapper[5002]: I0930 12:30:03.776450 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29320590-5z254" Sep 30 12:30:26 crc kubenswrapper[5002]: I0930 12:30:26.044716 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-cainjector-7f985d654d-974dk"] Sep 30 12:30:26 crc kubenswrapper[5002]: E0930 12:30:26.045571 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e2e6d49f-130e-4161-9ee5-72ad4a0bc56c" containerName="collect-profiles" Sep 30 12:30:26 crc kubenswrapper[5002]: I0930 12:30:26.045588 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="e2e6d49f-130e-4161-9ee5-72ad4a0bc56c" containerName="collect-profiles" Sep 30 12:30:26 crc kubenswrapper[5002]: I0930 12:30:26.045878 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="e2e6d49f-130e-4161-9ee5-72ad4a0bc56c" containerName="collect-profiles" Sep 30 12:30:26 crc kubenswrapper[5002]: I0930 12:30:26.046249 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager/cert-manager-cainjector-7f985d654d-974dk" Sep 30 12:30:26 crc kubenswrapper[5002]: I0930 12:30:26.050391 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"openshift-service-ca.crt" Sep 30 12:30:26 crc kubenswrapper[5002]: I0930 12:30:26.051811 5002 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-cainjector-dockercfg-mwgn7" Sep 30 12:30:26 crc kubenswrapper[5002]: I0930 12:30:26.054696 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"kube-root-ca.crt" Sep 30 12:30:26 crc kubenswrapper[5002]: I0930 12:30:26.061551 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-7f985d654d-974dk"] Sep 30 12:30:26 crc kubenswrapper[5002]: I0930 12:30:26.065408 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-5b446d88c5-6z4b2"] Sep 30 12:30:26 crc kubenswrapper[5002]: I0930 12:30:26.066294 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-5b446d88c5-6z4b2" Sep 30 12:30:26 crc kubenswrapper[5002]: I0930 12:30:26.070045 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-webhook-5655c58dd6-h79vx"] Sep 30 12:30:26 crc kubenswrapper[5002]: I0930 12:30:26.070807 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-webhook-5655c58dd6-h79vx" Sep 30 12:30:26 crc kubenswrapper[5002]: I0930 12:30:26.071915 5002 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-dockercfg-cx98q" Sep 30 12:30:26 crc kubenswrapper[5002]: I0930 12:30:26.075935 5002 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-webhook-dockercfg-88tpd" Sep 30 12:30:26 crc kubenswrapper[5002]: I0930 12:30:26.090195 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-5b446d88c5-6z4b2"] Sep 30 12:30:26 crc kubenswrapper[5002]: I0930 12:30:26.093327 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-5655c58dd6-h79vx"] Sep 30 12:30:26 crc kubenswrapper[5002]: I0930 12:30:26.128339 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bqz2v\" (UniqueName: \"kubernetes.io/projected/fa128ff5-c231-478a-8a20-a617f7187459-kube-api-access-bqz2v\") pod \"cert-manager-5b446d88c5-6z4b2\" (UID: \"fa128ff5-c231-478a-8a20-a617f7187459\") " pod="cert-manager/cert-manager-5b446d88c5-6z4b2" Sep 30 12:30:26 crc kubenswrapper[5002]: I0930 12:30:26.128444 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6jcmf\" (UniqueName: \"kubernetes.io/projected/cf984127-3ede-48b0-84a6-aaa1c3c321c1-kube-api-access-6jcmf\") pod \"cert-manager-cainjector-7f985d654d-974dk\" (UID: \"cf984127-3ede-48b0-84a6-aaa1c3c321c1\") " pod="cert-manager/cert-manager-cainjector-7f985d654d-974dk" Sep 30 12:30:26 crc kubenswrapper[5002]: I0930 12:30:26.128561 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jvk75\" (UniqueName: \"kubernetes.io/projected/22886b45-e205-4eed-8610-087e217a2f3e-kube-api-access-jvk75\") pod \"cert-manager-webhook-5655c58dd6-h79vx\" (UID: \"22886b45-e205-4eed-8610-087e217a2f3e\") " pod="cert-manager/cert-manager-webhook-5655c58dd6-h79vx" Sep 30 12:30:26 
crc kubenswrapper[5002]: I0930 12:30:26.229760 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6jcmf\" (UniqueName: \"kubernetes.io/projected/cf984127-3ede-48b0-84a6-aaa1c3c321c1-kube-api-access-6jcmf\") pod \"cert-manager-cainjector-7f985d654d-974dk\" (UID: \"cf984127-3ede-48b0-84a6-aaa1c3c321c1\") " pod="cert-manager/cert-manager-cainjector-7f985d654d-974dk" Sep 30 12:30:26 crc kubenswrapper[5002]: I0930 12:30:26.229818 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jvk75\" (UniqueName: \"kubernetes.io/projected/22886b45-e205-4eed-8610-087e217a2f3e-kube-api-access-jvk75\") pod \"cert-manager-webhook-5655c58dd6-h79vx\" (UID: \"22886b45-e205-4eed-8610-087e217a2f3e\") " pod="cert-manager/cert-manager-webhook-5655c58dd6-h79vx" Sep 30 12:30:26 crc kubenswrapper[5002]: I0930 12:30:26.229902 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bqz2v\" (UniqueName: \"kubernetes.io/projected/fa128ff5-c231-478a-8a20-a617f7187459-kube-api-access-bqz2v\") pod \"cert-manager-5b446d88c5-6z4b2\" (UID: \"fa128ff5-c231-478a-8a20-a617f7187459\") " pod="cert-manager/cert-manager-5b446d88c5-6z4b2" Sep 30 12:30:26 crc kubenswrapper[5002]: I0930 12:30:26.249022 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6jcmf\" (UniqueName: \"kubernetes.io/projected/cf984127-3ede-48b0-84a6-aaa1c3c321c1-kube-api-access-6jcmf\") pod \"cert-manager-cainjector-7f985d654d-974dk\" (UID: \"cf984127-3ede-48b0-84a6-aaa1c3c321c1\") " pod="cert-manager/cert-manager-cainjector-7f985d654d-974dk" Sep 30 12:30:26 crc kubenswrapper[5002]: I0930 12:30:26.249590 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jvk75\" (UniqueName: \"kubernetes.io/projected/22886b45-e205-4eed-8610-087e217a2f3e-kube-api-access-jvk75\") pod \"cert-manager-webhook-5655c58dd6-h79vx\" (UID: \"22886b45-e205-4eed-8610-087e217a2f3e\") " pod="cert-manager/cert-manager-webhook-5655c58dd6-h79vx" Sep 30 12:30:26 crc kubenswrapper[5002]: I0930 12:30:26.251521 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bqz2v\" (UniqueName: \"kubernetes.io/projected/fa128ff5-c231-478a-8a20-a617f7187459-kube-api-access-bqz2v\") pod \"cert-manager-5b446d88c5-6z4b2\" (UID: \"fa128ff5-c231-478a-8a20-a617f7187459\") " pod="cert-manager/cert-manager-5b446d88c5-6z4b2" Sep 30 12:30:26 crc kubenswrapper[5002]: I0930 12:30:26.377055 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-cainjector-7f985d654d-974dk" Sep 30 12:30:26 crc kubenswrapper[5002]: I0930 12:30:26.388341 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-5b446d88c5-6z4b2" Sep 30 12:30:26 crc kubenswrapper[5002]: I0930 12:30:26.398620 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager/cert-manager-webhook-5655c58dd6-h79vx" Sep 30 12:30:26 crc kubenswrapper[5002]: I0930 12:30:26.618170 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-5655c58dd6-h79vx"] Sep 30 12:30:26 crc kubenswrapper[5002]: I0930 12:30:26.624170 5002 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Sep 30 12:30:26 crc kubenswrapper[5002]: I0930 12:30:26.774958 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-5b446d88c5-6z4b2"] Sep 30 12:30:26 crc kubenswrapper[5002]: W0930 12:30:26.780286 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podfa128ff5_c231_478a_8a20_a617f7187459.slice/crio-1bbab4be6f0fe4e0a193afe156a12e3a571cc60b8db722a72ea4e3fdf8152d98 WatchSource:0}: Error finding container 1bbab4be6f0fe4e0a193afe156a12e3a571cc60b8db722a72ea4e3fdf8152d98: Status 404 returned error can't find the container with id 1bbab4be6f0fe4e0a193afe156a12e3a571cc60b8db722a72ea4e3fdf8152d98 Sep 30 12:30:26 crc kubenswrapper[5002]: I0930 12:30:26.793460 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-7f985d654d-974dk"] Sep 30 12:30:26 crc kubenswrapper[5002]: W0930 12:30:26.804644 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podcf984127_3ede_48b0_84a6_aaa1c3c321c1.slice/crio-292a9a718b8643e919c3e6c19dbaf2c57d98eb5cfc601d872ef5efd9f704483e WatchSource:0}: Error finding container 292a9a718b8643e919c3e6c19dbaf2c57d98eb5cfc601d872ef5efd9f704483e: Status 404 returned error can't find the container with id 292a9a718b8643e919c3e6c19dbaf2c57d98eb5cfc601d872ef5efd9f704483e Sep 30 12:30:26 crc kubenswrapper[5002]: I0930 12:30:26.919612 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-5655c58dd6-h79vx" event={"ID":"22886b45-e205-4eed-8610-087e217a2f3e","Type":"ContainerStarted","Data":"dd7f7a3ea4f14106c725d7a86649893199168f198496da74490f499dceeb5fd9"} Sep 30 12:30:26 crc kubenswrapper[5002]: I0930 12:30:26.920908 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-7f985d654d-974dk" event={"ID":"cf984127-3ede-48b0-84a6-aaa1c3c321c1","Type":"ContainerStarted","Data":"292a9a718b8643e919c3e6c19dbaf2c57d98eb5cfc601d872ef5efd9f704483e"} Sep 30 12:30:26 crc kubenswrapper[5002]: I0930 12:30:26.922360 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-5b446d88c5-6z4b2" event={"ID":"fa128ff5-c231-478a-8a20-a617f7187459","Type":"ContainerStarted","Data":"1bbab4be6f0fe4e0a193afe156a12e3a571cc60b8db722a72ea4e3fdf8152d98"} Sep 30 12:30:32 crc kubenswrapper[5002]: I0930 12:30:32.963724 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-5655c58dd6-h79vx" event={"ID":"22886b45-e205-4eed-8610-087e217a2f3e","Type":"ContainerStarted","Data":"324454d0778d59dd7291ae49fdaf1964d57a7c4d2c0129a4c667a30fe6b9f5d0"} Sep 30 12:30:32 crc kubenswrapper[5002]: I0930 12:30:32.966100 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="cert-manager/cert-manager-webhook-5655c58dd6-h79vx" Sep 30 12:30:32 crc kubenswrapper[5002]: I0930 12:30:32.969847 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-7f985d654d-974dk" 
event={"ID":"cf984127-3ede-48b0-84a6-aaa1c3c321c1","Type":"ContainerStarted","Data":"c00de56a990f88f0098c0e1e2418d6fc3d5e69661bfc6db001277434dd3943ed"} Sep 30 12:30:32 crc kubenswrapper[5002]: I0930 12:30:32.981533 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-webhook-5655c58dd6-h79vx" podStartSLOduration=1.5637710120000001 podStartE2EDuration="6.981515579s" podCreationTimestamp="2025-09-30 12:30:26 +0000 UTC" firstStartedPulling="2025-09-30 12:30:26.623989048 +0000 UTC m=+600.873671194" lastFinishedPulling="2025-09-30 12:30:32.041733615 +0000 UTC m=+606.291415761" observedRunningTime="2025-09-30 12:30:32.978486903 +0000 UTC m=+607.228169059" watchObservedRunningTime="2025-09-30 12:30:32.981515579 +0000 UTC m=+607.231197725" Sep 30 12:30:32 crc kubenswrapper[5002]: I0930 12:30:32.996570 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-cainjector-7f985d654d-974dk" podStartSLOduration=1.808932921 podStartE2EDuration="6.996549009s" podCreationTimestamp="2025-09-30 12:30:26 +0000 UTC" firstStartedPulling="2025-09-30 12:30:26.807329828 +0000 UTC m=+601.057011994" lastFinishedPulling="2025-09-30 12:30:31.994945926 +0000 UTC m=+606.244628082" observedRunningTime="2025-09-30 12:30:32.994579224 +0000 UTC m=+607.244261380" watchObservedRunningTime="2025-09-30 12:30:32.996549009 +0000 UTC m=+607.246231155" Sep 30 12:30:33 crc kubenswrapper[5002]: I0930 12:30:33.977707 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-5b446d88c5-6z4b2" event={"ID":"fa128ff5-c231-478a-8a20-a617f7187459","Type":"ContainerStarted","Data":"97041e6c759e0fc0f819bbda420920936ba138dbadef8497bc6da287cfa078c4"} Sep 30 12:30:33 crc kubenswrapper[5002]: I0930 12:30:33.997718 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-5b446d88c5-6z4b2" podStartSLOduration=1.713689386 podStartE2EDuration="7.997694039s" podCreationTimestamp="2025-09-30 12:30:26 +0000 UTC" firstStartedPulling="2025-09-30 12:30:26.783674116 +0000 UTC m=+601.033356262" lastFinishedPulling="2025-09-30 12:30:33.067678729 +0000 UTC m=+607.317360915" observedRunningTime="2025-09-30 12:30:33.993865863 +0000 UTC m=+608.243548069" watchObservedRunningTime="2025-09-30 12:30:33.997694039 +0000 UTC m=+608.247376205" Sep 30 12:30:36 crc kubenswrapper[5002]: I0930 12:30:36.510244 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-4pvsr"] Sep 30 12:30:36 crc kubenswrapper[5002]: I0930 12:30:36.510974 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-4pvsr" podUID="7095aa7a-d067-4977-bdc5-3a45a52a6a39" containerName="ovn-controller" containerID="cri-o://a65c3c1af3f1ee07c2f5350011d1f5eb61dfe0b21ff13d7326373d65ffe603c9" gracePeriod=30 Sep 30 12:30:36 crc kubenswrapper[5002]: I0930 12:30:36.511326 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-4pvsr" podUID="7095aa7a-d067-4977-bdc5-3a45a52a6a39" containerName="sbdb" containerID="cri-o://6ce6227a7ac3ced1fbd4814039859e25ff52f314aa8479275f1bfc49b04cf411" gracePeriod=30 Sep 30 12:30:36 crc kubenswrapper[5002]: I0930 12:30:36.511376 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-4pvsr" podUID="7095aa7a-d067-4977-bdc5-3a45a52a6a39" containerName="nbdb" 
containerID="cri-o://d41b04f928f3b4423bf4aa1babfd6292b749752d31ac57771b36bed503312d3f" gracePeriod=30 Sep 30 12:30:36 crc kubenswrapper[5002]: I0930 12:30:36.511412 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-4pvsr" podUID="7095aa7a-d067-4977-bdc5-3a45a52a6a39" containerName="northd" containerID="cri-o://ceb946c94ca247c05c57001754a73122b1bb98a98d886df9d64b2e4a003a4cf6" gracePeriod=30 Sep 30 12:30:36 crc kubenswrapper[5002]: I0930 12:30:36.511439 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-4pvsr" podUID="7095aa7a-d067-4977-bdc5-3a45a52a6a39" containerName="kube-rbac-proxy-ovn-metrics" containerID="cri-o://89dbc048c1483376e11a6754384e9ea154767a88f7dfb3463809a2554bb142b9" gracePeriod=30 Sep 30 12:30:36 crc kubenswrapper[5002]: I0930 12:30:36.511487 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-4pvsr" podUID="7095aa7a-d067-4977-bdc5-3a45a52a6a39" containerName="kube-rbac-proxy-node" containerID="cri-o://8b95267743f525eb5c8712304a1dc7afe4403b2065c0a3b594d95dcba6348170" gracePeriod=30 Sep 30 12:30:36 crc kubenswrapper[5002]: I0930 12:30:36.511518 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-4pvsr" podUID="7095aa7a-d067-4977-bdc5-3a45a52a6a39" containerName="ovn-acl-logging" containerID="cri-o://bfbe7b62a4f0a00896ac4608d70c4ef6dc7675e0312f85ca181fcf1087fe73e4" gracePeriod=30 Sep 30 12:30:36 crc kubenswrapper[5002]: I0930 12:30:36.580270 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-4pvsr" podUID="7095aa7a-d067-4977-bdc5-3a45a52a6a39" containerName="ovnkube-controller" containerID="cri-o://f2b1316880efb368c068d7979dd622f56942ad0268acb39147dcd8131884428f" gracePeriod=30 Sep 30 12:30:36 crc kubenswrapper[5002]: I0930 12:30:36.882452 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-4pvsr_7095aa7a-d067-4977-bdc5-3a45a52a6a39/ovnkube-controller/3.log" Sep 30 12:30:36 crc kubenswrapper[5002]: I0930 12:30:36.885152 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-4pvsr_7095aa7a-d067-4977-bdc5-3a45a52a6a39/ovn-acl-logging/0.log" Sep 30 12:30:36 crc kubenswrapper[5002]: I0930 12:30:36.885657 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-4pvsr_7095aa7a-d067-4977-bdc5-3a45a52a6a39/ovn-controller/0.log" Sep 30 12:30:36 crc kubenswrapper[5002]: I0930 12:30:36.886233 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-4pvsr" Sep 30 12:30:36 crc kubenswrapper[5002]: I0930 12:30:36.962922 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-4lk28"] Sep 30 12:30:36 crc kubenswrapper[5002]: E0930 12:30:36.963205 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7095aa7a-d067-4977-bdc5-3a45a52a6a39" containerName="northd" Sep 30 12:30:36 crc kubenswrapper[5002]: I0930 12:30:36.963225 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="7095aa7a-d067-4977-bdc5-3a45a52a6a39" containerName="northd" Sep 30 12:30:36 crc kubenswrapper[5002]: E0930 12:30:36.963254 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7095aa7a-d067-4977-bdc5-3a45a52a6a39" containerName="kube-rbac-proxy-ovn-metrics" Sep 30 12:30:36 crc kubenswrapper[5002]: I0930 12:30:36.963267 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="7095aa7a-d067-4977-bdc5-3a45a52a6a39" containerName="kube-rbac-proxy-ovn-metrics" Sep 30 12:30:36 crc kubenswrapper[5002]: E0930 12:30:36.963284 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7095aa7a-d067-4977-bdc5-3a45a52a6a39" containerName="ovnkube-controller" Sep 30 12:30:36 crc kubenswrapper[5002]: I0930 12:30:36.963295 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="7095aa7a-d067-4977-bdc5-3a45a52a6a39" containerName="ovnkube-controller" Sep 30 12:30:36 crc kubenswrapper[5002]: E0930 12:30:36.963308 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7095aa7a-d067-4977-bdc5-3a45a52a6a39" containerName="ovn-acl-logging" Sep 30 12:30:36 crc kubenswrapper[5002]: I0930 12:30:36.963319 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="7095aa7a-d067-4977-bdc5-3a45a52a6a39" containerName="ovn-acl-logging" Sep 30 12:30:36 crc kubenswrapper[5002]: E0930 12:30:36.963336 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7095aa7a-d067-4977-bdc5-3a45a52a6a39" containerName="nbdb" Sep 30 12:30:36 crc kubenswrapper[5002]: I0930 12:30:36.963347 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="7095aa7a-d067-4977-bdc5-3a45a52a6a39" containerName="nbdb" Sep 30 12:30:36 crc kubenswrapper[5002]: E0930 12:30:36.963361 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7095aa7a-d067-4977-bdc5-3a45a52a6a39" containerName="sbdb" Sep 30 12:30:36 crc kubenswrapper[5002]: I0930 12:30:36.963372 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="7095aa7a-d067-4977-bdc5-3a45a52a6a39" containerName="sbdb" Sep 30 12:30:36 crc kubenswrapper[5002]: E0930 12:30:36.963387 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7095aa7a-d067-4977-bdc5-3a45a52a6a39" containerName="ovnkube-controller" Sep 30 12:30:36 crc kubenswrapper[5002]: I0930 12:30:36.963399 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="7095aa7a-d067-4977-bdc5-3a45a52a6a39" containerName="ovnkube-controller" Sep 30 12:30:36 crc kubenswrapper[5002]: E0930 12:30:36.963413 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7095aa7a-d067-4977-bdc5-3a45a52a6a39" containerName="ovn-controller" Sep 30 12:30:36 crc kubenswrapper[5002]: I0930 12:30:36.963424 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="7095aa7a-d067-4977-bdc5-3a45a52a6a39" containerName="ovn-controller" Sep 30 12:30:36 crc kubenswrapper[5002]: E0930 12:30:36.963440 5002 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="7095aa7a-d067-4977-bdc5-3a45a52a6a39" containerName="kube-rbac-proxy-node" Sep 30 12:30:36 crc kubenswrapper[5002]: I0930 12:30:36.963453 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="7095aa7a-d067-4977-bdc5-3a45a52a6a39" containerName="kube-rbac-proxy-node" Sep 30 12:30:36 crc kubenswrapper[5002]: E0930 12:30:36.963493 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7095aa7a-d067-4977-bdc5-3a45a52a6a39" containerName="kubecfg-setup" Sep 30 12:30:36 crc kubenswrapper[5002]: I0930 12:30:36.963505 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="7095aa7a-d067-4977-bdc5-3a45a52a6a39" containerName="kubecfg-setup" Sep 30 12:30:36 crc kubenswrapper[5002]: E0930 12:30:36.963515 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7095aa7a-d067-4977-bdc5-3a45a52a6a39" containerName="ovnkube-controller" Sep 30 12:30:36 crc kubenswrapper[5002]: I0930 12:30:36.963524 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="7095aa7a-d067-4977-bdc5-3a45a52a6a39" containerName="ovnkube-controller" Sep 30 12:30:36 crc kubenswrapper[5002]: E0930 12:30:36.963538 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7095aa7a-d067-4977-bdc5-3a45a52a6a39" containerName="ovnkube-controller" Sep 30 12:30:36 crc kubenswrapper[5002]: I0930 12:30:36.963547 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="7095aa7a-d067-4977-bdc5-3a45a52a6a39" containerName="ovnkube-controller" Sep 30 12:30:36 crc kubenswrapper[5002]: I0930 12:30:36.963690 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="7095aa7a-d067-4977-bdc5-3a45a52a6a39" containerName="ovnkube-controller" Sep 30 12:30:36 crc kubenswrapper[5002]: I0930 12:30:36.963705 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="7095aa7a-d067-4977-bdc5-3a45a52a6a39" containerName="kube-rbac-proxy-node" Sep 30 12:30:36 crc kubenswrapper[5002]: I0930 12:30:36.963721 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="7095aa7a-d067-4977-bdc5-3a45a52a6a39" containerName="ovn-acl-logging" Sep 30 12:30:36 crc kubenswrapper[5002]: I0930 12:30:36.963742 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="7095aa7a-d067-4977-bdc5-3a45a52a6a39" containerName="nbdb" Sep 30 12:30:36 crc kubenswrapper[5002]: I0930 12:30:36.963756 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="7095aa7a-d067-4977-bdc5-3a45a52a6a39" containerName="ovnkube-controller" Sep 30 12:30:36 crc kubenswrapper[5002]: I0930 12:30:36.963766 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="7095aa7a-d067-4977-bdc5-3a45a52a6a39" containerName="sbdb" Sep 30 12:30:36 crc kubenswrapper[5002]: I0930 12:30:36.963779 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="7095aa7a-d067-4977-bdc5-3a45a52a6a39" containerName="northd" Sep 30 12:30:36 crc kubenswrapper[5002]: I0930 12:30:36.963793 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="7095aa7a-d067-4977-bdc5-3a45a52a6a39" containerName="ovnkube-controller" Sep 30 12:30:36 crc kubenswrapper[5002]: I0930 12:30:36.963838 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="7095aa7a-d067-4977-bdc5-3a45a52a6a39" containerName="ovnkube-controller" Sep 30 12:30:36 crc kubenswrapper[5002]: I0930 12:30:36.963851 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="7095aa7a-d067-4977-bdc5-3a45a52a6a39" containerName="ovn-controller" Sep 30 12:30:36 crc kubenswrapper[5002]: I0930 
12:30:36.963862 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="7095aa7a-d067-4977-bdc5-3a45a52a6a39" containerName="kube-rbac-proxy-ovn-metrics" Sep 30 12:30:36 crc kubenswrapper[5002]: E0930 12:30:36.964028 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7095aa7a-d067-4977-bdc5-3a45a52a6a39" containerName="ovnkube-controller" Sep 30 12:30:36 crc kubenswrapper[5002]: I0930 12:30:36.964044 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="7095aa7a-d067-4977-bdc5-3a45a52a6a39" containerName="ovnkube-controller" Sep 30 12:30:36 crc kubenswrapper[5002]: I0930 12:30:36.964199 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="7095aa7a-d067-4977-bdc5-3a45a52a6a39" containerName="ovnkube-controller" Sep 30 12:30:36 crc kubenswrapper[5002]: I0930 12:30:36.970051 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-4lk28" Sep 30 12:30:36 crc kubenswrapper[5002]: I0930 12:30:36.977071 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/d64c51bd-c4d1-44a3-8e4c-d5df0a5511d6-ovnkube-config\") pod \"ovnkube-node-4lk28\" (UID: \"d64c51bd-c4d1-44a3-8e4c-d5df0a5511d6\") " pod="openshift-ovn-kubernetes/ovnkube-node-4lk28" Sep 30 12:30:36 crc kubenswrapper[5002]: I0930 12:30:36.977137 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-65stw\" (UniqueName: \"kubernetes.io/projected/d64c51bd-c4d1-44a3-8e4c-d5df0a5511d6-kube-api-access-65stw\") pod \"ovnkube-node-4lk28\" (UID: \"d64c51bd-c4d1-44a3-8e4c-d5df0a5511d6\") " pod="openshift-ovn-kubernetes/ovnkube-node-4lk28" Sep 30 12:30:36 crc kubenswrapper[5002]: I0930 12:30:36.977266 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/d64c51bd-c4d1-44a3-8e4c-d5df0a5511d6-ovn-node-metrics-cert\") pod \"ovnkube-node-4lk28\" (UID: \"d64c51bd-c4d1-44a3-8e4c-d5df0a5511d6\") " pod="openshift-ovn-kubernetes/ovnkube-node-4lk28" Sep 30 12:30:36 crc kubenswrapper[5002]: I0930 12:30:36.977326 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/d64c51bd-c4d1-44a3-8e4c-d5df0a5511d6-etc-openvswitch\") pod \"ovnkube-node-4lk28\" (UID: \"d64c51bd-c4d1-44a3-8e4c-d5df0a5511d6\") " pod="openshift-ovn-kubernetes/ovnkube-node-4lk28" Sep 30 12:30:36 crc kubenswrapper[5002]: I0930 12:30:36.977345 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/d64c51bd-c4d1-44a3-8e4c-d5df0a5511d6-host-run-ovn-kubernetes\") pod \"ovnkube-node-4lk28\" (UID: \"d64c51bd-c4d1-44a3-8e4c-d5df0a5511d6\") " pod="openshift-ovn-kubernetes/ovnkube-node-4lk28" Sep 30 12:30:36 crc kubenswrapper[5002]: I0930 12:30:36.977366 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/d64c51bd-c4d1-44a3-8e4c-d5df0a5511d6-host-kubelet\") pod \"ovnkube-node-4lk28\" (UID: \"d64c51bd-c4d1-44a3-8e4c-d5df0a5511d6\") " pod="openshift-ovn-kubernetes/ovnkube-node-4lk28" Sep 30 12:30:36 crc kubenswrapper[5002]: I0930 12:30:36.977392 5002 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d64c51bd-c4d1-44a3-8e4c-d5df0a5511d6-host-slash\") pod \"ovnkube-node-4lk28\" (UID: \"d64c51bd-c4d1-44a3-8e4c-d5df0a5511d6\") " pod="openshift-ovn-kubernetes/ovnkube-node-4lk28" Sep 30 12:30:36 crc kubenswrapper[5002]: I0930 12:30:36.977450 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/d64c51bd-c4d1-44a3-8e4c-d5df0a5511d6-node-log\") pod \"ovnkube-node-4lk28\" (UID: \"d64c51bd-c4d1-44a3-8e4c-d5df0a5511d6\") " pod="openshift-ovn-kubernetes/ovnkube-node-4lk28" Sep 30 12:30:36 crc kubenswrapper[5002]: I0930 12:30:36.977510 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/d64c51bd-c4d1-44a3-8e4c-d5df0a5511d6-env-overrides\") pod \"ovnkube-node-4lk28\" (UID: \"d64c51bd-c4d1-44a3-8e4c-d5df0a5511d6\") " pod="openshift-ovn-kubernetes/ovnkube-node-4lk28" Sep 30 12:30:36 crc kubenswrapper[5002]: I0930 12:30:36.977550 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/d64c51bd-c4d1-44a3-8e4c-d5df0a5511d6-host-run-netns\") pod \"ovnkube-node-4lk28\" (UID: \"d64c51bd-c4d1-44a3-8e4c-d5df0a5511d6\") " pod="openshift-ovn-kubernetes/ovnkube-node-4lk28" Sep 30 12:30:36 crc kubenswrapper[5002]: I0930 12:30:36.977602 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/d64c51bd-c4d1-44a3-8e4c-d5df0a5511d6-run-ovn\") pod \"ovnkube-node-4lk28\" (UID: \"d64c51bd-c4d1-44a3-8e4c-d5df0a5511d6\") " pod="openshift-ovn-kubernetes/ovnkube-node-4lk28" Sep 30 12:30:36 crc kubenswrapper[5002]: I0930 12:30:36.977638 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/d64c51bd-c4d1-44a3-8e4c-d5df0a5511d6-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-4lk28\" (UID: \"d64c51bd-c4d1-44a3-8e4c-d5df0a5511d6\") " pod="openshift-ovn-kubernetes/ovnkube-node-4lk28" Sep 30 12:30:36 crc kubenswrapper[5002]: I0930 12:30:36.977700 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/d64c51bd-c4d1-44a3-8e4c-d5df0a5511d6-run-systemd\") pod \"ovnkube-node-4lk28\" (UID: \"d64c51bd-c4d1-44a3-8e4c-d5df0a5511d6\") " pod="openshift-ovn-kubernetes/ovnkube-node-4lk28" Sep 30 12:30:36 crc kubenswrapper[5002]: I0930 12:30:36.977732 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/d64c51bd-c4d1-44a3-8e4c-d5df0a5511d6-log-socket\") pod \"ovnkube-node-4lk28\" (UID: \"d64c51bd-c4d1-44a3-8e4c-d5df0a5511d6\") " pod="openshift-ovn-kubernetes/ovnkube-node-4lk28" Sep 30 12:30:36 crc kubenswrapper[5002]: I0930 12:30:36.977774 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/d64c51bd-c4d1-44a3-8e4c-d5df0a5511d6-systemd-units\") pod \"ovnkube-node-4lk28\" (UID: \"d64c51bd-c4d1-44a3-8e4c-d5df0a5511d6\") " pod="openshift-ovn-kubernetes/ovnkube-node-4lk28" Sep 30 12:30:36 crc 
kubenswrapper[5002]: I0930 12:30:36.977814 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/d64c51bd-c4d1-44a3-8e4c-d5df0a5511d6-host-cni-netd\") pod \"ovnkube-node-4lk28\" (UID: \"d64c51bd-c4d1-44a3-8e4c-d5df0a5511d6\") " pod="openshift-ovn-kubernetes/ovnkube-node-4lk28" Sep 30 12:30:36 crc kubenswrapper[5002]: I0930 12:30:36.977850 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/d64c51bd-c4d1-44a3-8e4c-d5df0a5511d6-var-lib-openvswitch\") pod \"ovnkube-node-4lk28\" (UID: \"d64c51bd-c4d1-44a3-8e4c-d5df0a5511d6\") " pod="openshift-ovn-kubernetes/ovnkube-node-4lk28" Sep 30 12:30:36 crc kubenswrapper[5002]: I0930 12:30:36.977894 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/d64c51bd-c4d1-44a3-8e4c-d5df0a5511d6-run-openvswitch\") pod \"ovnkube-node-4lk28\" (UID: \"d64c51bd-c4d1-44a3-8e4c-d5df0a5511d6\") " pod="openshift-ovn-kubernetes/ovnkube-node-4lk28" Sep 30 12:30:36 crc kubenswrapper[5002]: I0930 12:30:36.977928 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/d64c51bd-c4d1-44a3-8e4c-d5df0a5511d6-host-cni-bin\") pod \"ovnkube-node-4lk28\" (UID: \"d64c51bd-c4d1-44a3-8e4c-d5df0a5511d6\") " pod="openshift-ovn-kubernetes/ovnkube-node-4lk28" Sep 30 12:30:36 crc kubenswrapper[5002]: I0930 12:30:36.977968 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/d64c51bd-c4d1-44a3-8e4c-d5df0a5511d6-ovnkube-script-lib\") pod \"ovnkube-node-4lk28\" (UID: \"d64c51bd-c4d1-44a3-8e4c-d5df0a5511d6\") " pod="openshift-ovn-kubernetes/ovnkube-node-4lk28" Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.000892 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-ttfn8_2bd9f18d-bfb3-4bd7-9a87-242029cd3200/kube-multus/2.log" Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.001360 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-ttfn8_2bd9f18d-bfb3-4bd7-9a87-242029cd3200/kube-multus/1.log" Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.001394 5002 generic.go:334] "Generic (PLEG): container finished" podID="2bd9f18d-bfb3-4bd7-9a87-242029cd3200" containerID="c5f7e86f7e2139fe8fdc7a6d09f418a223f9da04a6950e5dec018265268c7de5" exitCode=2 Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.001441 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-ttfn8" event={"ID":"2bd9f18d-bfb3-4bd7-9a87-242029cd3200","Type":"ContainerDied","Data":"c5f7e86f7e2139fe8fdc7a6d09f418a223f9da04a6950e5dec018265268c7de5"} Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.001490 5002 scope.go:117] "RemoveContainer" containerID="4cf7d940b7333c33f9092bf1a630d298ae51fee0947c17ef09d902ae5dc1103c" Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.002064 5002 scope.go:117] "RemoveContainer" containerID="c5f7e86f7e2139fe8fdc7a6d09f418a223f9da04a6950e5dec018265268c7de5" Sep 30 12:30:37 crc kubenswrapper[5002]: E0930 12:30:37.002288 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: 
\"back-off 20s restarting failed container=kube-multus pod=multus-ttfn8_openshift-multus(2bd9f18d-bfb3-4bd7-9a87-242029cd3200)\"" pod="openshift-multus/multus-ttfn8" podUID="2bd9f18d-bfb3-4bd7-9a87-242029cd3200" Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.009135 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-4pvsr_7095aa7a-d067-4977-bdc5-3a45a52a6a39/ovnkube-controller/3.log" Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.012098 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-4pvsr_7095aa7a-d067-4977-bdc5-3a45a52a6a39/ovn-acl-logging/0.log" Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.013004 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-4pvsr_7095aa7a-d067-4977-bdc5-3a45a52a6a39/ovn-controller/0.log" Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.013803 5002 generic.go:334] "Generic (PLEG): container finished" podID="7095aa7a-d067-4977-bdc5-3a45a52a6a39" containerID="f2b1316880efb368c068d7979dd622f56942ad0268acb39147dcd8131884428f" exitCode=0 Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.013853 5002 generic.go:334] "Generic (PLEG): container finished" podID="7095aa7a-d067-4977-bdc5-3a45a52a6a39" containerID="6ce6227a7ac3ced1fbd4814039859e25ff52f314aa8479275f1bfc49b04cf411" exitCode=0 Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.013869 5002 generic.go:334] "Generic (PLEG): container finished" podID="7095aa7a-d067-4977-bdc5-3a45a52a6a39" containerID="d41b04f928f3b4423bf4aa1babfd6292b749752d31ac57771b36bed503312d3f" exitCode=0 Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.013882 5002 generic.go:334] "Generic (PLEG): container finished" podID="7095aa7a-d067-4977-bdc5-3a45a52a6a39" containerID="ceb946c94ca247c05c57001754a73122b1bb98a98d886df9d64b2e4a003a4cf6" exitCode=0 Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.013905 5002 generic.go:334] "Generic (PLEG): container finished" podID="7095aa7a-d067-4977-bdc5-3a45a52a6a39" containerID="89dbc048c1483376e11a6754384e9ea154767a88f7dfb3463809a2554bb142b9" exitCode=0 Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.013918 5002 generic.go:334] "Generic (PLEG): container finished" podID="7095aa7a-d067-4977-bdc5-3a45a52a6a39" containerID="8b95267743f525eb5c8712304a1dc7afe4403b2065c0a3b594d95dcba6348170" exitCode=0 Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.013930 5002 generic.go:334] "Generic (PLEG): container finished" podID="7095aa7a-d067-4977-bdc5-3a45a52a6a39" containerID="bfbe7b62a4f0a00896ac4608d70c4ef6dc7675e0312f85ca181fcf1087fe73e4" exitCode=143 Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.013943 5002 generic.go:334] "Generic (PLEG): container finished" podID="7095aa7a-d067-4977-bdc5-3a45a52a6a39" containerID="a65c3c1af3f1ee07c2f5350011d1f5eb61dfe0b21ff13d7326373d65ffe603c9" exitCode=143 Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.013972 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4pvsr" event={"ID":"7095aa7a-d067-4977-bdc5-3a45a52a6a39","Type":"ContainerDied","Data":"f2b1316880efb368c068d7979dd622f56942ad0268acb39147dcd8131884428f"} Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.014006 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4pvsr" 
event={"ID":"7095aa7a-d067-4977-bdc5-3a45a52a6a39","Type":"ContainerDied","Data":"6ce6227a7ac3ced1fbd4814039859e25ff52f314aa8479275f1bfc49b04cf411"} Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.014028 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4pvsr" event={"ID":"7095aa7a-d067-4977-bdc5-3a45a52a6a39","Type":"ContainerDied","Data":"d41b04f928f3b4423bf4aa1babfd6292b749752d31ac57771b36bed503312d3f"} Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.014045 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4pvsr" event={"ID":"7095aa7a-d067-4977-bdc5-3a45a52a6a39","Type":"ContainerDied","Data":"ceb946c94ca247c05c57001754a73122b1bb98a98d886df9d64b2e4a003a4cf6"} Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.014062 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4pvsr" event={"ID":"7095aa7a-d067-4977-bdc5-3a45a52a6a39","Type":"ContainerDied","Data":"89dbc048c1483376e11a6754384e9ea154767a88f7dfb3463809a2554bb142b9"} Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.014077 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4pvsr" event={"ID":"7095aa7a-d067-4977-bdc5-3a45a52a6a39","Type":"ContainerDied","Data":"8b95267743f525eb5c8712304a1dc7afe4403b2065c0a3b594d95dcba6348170"} Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.014107 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-4pvsr" Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.014094 5002 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"f2b1316880efb368c068d7979dd622f56942ad0268acb39147dcd8131884428f"} Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.014132 5002 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"9a9a8ff35878c8516cf03a6425a099adbb9e94d1feae510064c59f89b051fcb5"} Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.014143 5002 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"6ce6227a7ac3ced1fbd4814039859e25ff52f314aa8479275f1bfc49b04cf411"} Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.014155 5002 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"d41b04f928f3b4423bf4aa1babfd6292b749752d31ac57771b36bed503312d3f"} Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.014166 5002 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"ceb946c94ca247c05c57001754a73122b1bb98a98d886df9d64b2e4a003a4cf6"} Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.014176 5002 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"89dbc048c1483376e11a6754384e9ea154767a88f7dfb3463809a2554bb142b9"} Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.014186 5002 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"8b95267743f525eb5c8712304a1dc7afe4403b2065c0a3b594d95dcba6348170"} Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.014199 5002 pod_container_deletor.go:114] "Failed to issue the request to remove container" 
containerID={"Type":"cri-o","ID":"bfbe7b62a4f0a00896ac4608d70c4ef6dc7675e0312f85ca181fcf1087fe73e4"} Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.014208 5002 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"a65c3c1af3f1ee07c2f5350011d1f5eb61dfe0b21ff13d7326373d65ffe603c9"} Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.014218 5002 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"29ad21e0858ff157cc785f60f949fb08082938850870cc2c5198447f1e1d9253"} Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.014235 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4pvsr" event={"ID":"7095aa7a-d067-4977-bdc5-3a45a52a6a39","Type":"ContainerDied","Data":"bfbe7b62a4f0a00896ac4608d70c4ef6dc7675e0312f85ca181fcf1087fe73e4"} Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.014252 5002 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"f2b1316880efb368c068d7979dd622f56942ad0268acb39147dcd8131884428f"} Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.014263 5002 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"9a9a8ff35878c8516cf03a6425a099adbb9e94d1feae510064c59f89b051fcb5"} Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.014273 5002 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"6ce6227a7ac3ced1fbd4814039859e25ff52f314aa8479275f1bfc49b04cf411"} Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.014283 5002 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"d41b04f928f3b4423bf4aa1babfd6292b749752d31ac57771b36bed503312d3f"} Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.014292 5002 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"ceb946c94ca247c05c57001754a73122b1bb98a98d886df9d64b2e4a003a4cf6"} Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.014299 5002 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"89dbc048c1483376e11a6754384e9ea154767a88f7dfb3463809a2554bb142b9"} Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.014306 5002 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"8b95267743f525eb5c8712304a1dc7afe4403b2065c0a3b594d95dcba6348170"} Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.014313 5002 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"bfbe7b62a4f0a00896ac4608d70c4ef6dc7675e0312f85ca181fcf1087fe73e4"} Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.014320 5002 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"a65c3c1af3f1ee07c2f5350011d1f5eb61dfe0b21ff13d7326373d65ffe603c9"} Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.014327 5002 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"29ad21e0858ff157cc785f60f949fb08082938850870cc2c5198447f1e1d9253"} Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.014338 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-ovn-kubernetes/ovnkube-node-4pvsr" event={"ID":"7095aa7a-d067-4977-bdc5-3a45a52a6a39","Type":"ContainerDied","Data":"a65c3c1af3f1ee07c2f5350011d1f5eb61dfe0b21ff13d7326373d65ffe603c9"} Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.014351 5002 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"f2b1316880efb368c068d7979dd622f56942ad0268acb39147dcd8131884428f"} Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.014360 5002 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"9a9a8ff35878c8516cf03a6425a099adbb9e94d1feae510064c59f89b051fcb5"} Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.014368 5002 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"6ce6227a7ac3ced1fbd4814039859e25ff52f314aa8479275f1bfc49b04cf411"} Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.014375 5002 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"d41b04f928f3b4423bf4aa1babfd6292b749752d31ac57771b36bed503312d3f"} Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.014384 5002 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"ceb946c94ca247c05c57001754a73122b1bb98a98d886df9d64b2e4a003a4cf6"} Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.014391 5002 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"89dbc048c1483376e11a6754384e9ea154767a88f7dfb3463809a2554bb142b9"} Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.014399 5002 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"8b95267743f525eb5c8712304a1dc7afe4403b2065c0a3b594d95dcba6348170"} Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.014406 5002 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"bfbe7b62a4f0a00896ac4608d70c4ef6dc7675e0312f85ca181fcf1087fe73e4"} Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.014413 5002 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"a65c3c1af3f1ee07c2f5350011d1f5eb61dfe0b21ff13d7326373d65ffe603c9"} Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.014421 5002 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"29ad21e0858ff157cc785f60f949fb08082938850870cc2c5198447f1e1d9253"} Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.014431 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4pvsr" event={"ID":"7095aa7a-d067-4977-bdc5-3a45a52a6a39","Type":"ContainerDied","Data":"9e7540d9f3130e8c7b820a75697164f7033839bdc4652ef7bb80df91032bfff5"} Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.014443 5002 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"f2b1316880efb368c068d7979dd622f56942ad0268acb39147dcd8131884428f"} Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.014452 5002 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"9a9a8ff35878c8516cf03a6425a099adbb9e94d1feae510064c59f89b051fcb5"} Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 
12:30:37.014460 5002 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"6ce6227a7ac3ced1fbd4814039859e25ff52f314aa8479275f1bfc49b04cf411"} Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.014467 5002 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"d41b04f928f3b4423bf4aa1babfd6292b749752d31ac57771b36bed503312d3f"} Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.014530 5002 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"ceb946c94ca247c05c57001754a73122b1bb98a98d886df9d64b2e4a003a4cf6"} Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.014540 5002 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"89dbc048c1483376e11a6754384e9ea154767a88f7dfb3463809a2554bb142b9"} Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.014549 5002 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"8b95267743f525eb5c8712304a1dc7afe4403b2065c0a3b594d95dcba6348170"} Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.014560 5002 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"bfbe7b62a4f0a00896ac4608d70c4ef6dc7675e0312f85ca181fcf1087fe73e4"} Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.014569 5002 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"a65c3c1af3f1ee07c2f5350011d1f5eb61dfe0b21ff13d7326373d65ffe603c9"} Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.014578 5002 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"29ad21e0858ff157cc785f60f949fb08082938850870cc2c5198447f1e1d9253"} Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.037356 5002 scope.go:117] "RemoveContainer" containerID="f2b1316880efb368c068d7979dd622f56942ad0268acb39147dcd8131884428f" Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.054620 5002 scope.go:117] "RemoveContainer" containerID="9a9a8ff35878c8516cf03a6425a099adbb9e94d1feae510064c59f89b051fcb5" Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.072511 5002 scope.go:117] "RemoveContainer" containerID="6ce6227a7ac3ced1fbd4814039859e25ff52f314aa8479275f1bfc49b04cf411" Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.078798 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/7095aa7a-d067-4977-bdc5-3a45a52a6a39-node-log\") pod \"7095aa7a-d067-4977-bdc5-3a45a52a6a39\" (UID: \"7095aa7a-d067-4977-bdc5-3a45a52a6a39\") " Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.078908 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/7095aa7a-d067-4977-bdc5-3a45a52a6a39-node-log" (OuterVolumeSpecName: "node-log") pod "7095aa7a-d067-4977-bdc5-3a45a52a6a39" (UID: "7095aa7a-d067-4977-bdc5-3a45a52a6a39"). InnerVolumeSpecName "node-log". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.079002 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/7095aa7a-d067-4977-bdc5-3a45a52a6a39-host-kubelet\") pod \"7095aa7a-d067-4977-bdc5-3a45a52a6a39\" (UID: \"7095aa7a-d067-4977-bdc5-3a45a52a6a39\") " Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.079069 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/7095aa7a-d067-4977-bdc5-3a45a52a6a39-host-kubelet" (OuterVolumeSpecName: "host-kubelet") pod "7095aa7a-d067-4977-bdc5-3a45a52a6a39" (UID: "7095aa7a-d067-4977-bdc5-3a45a52a6a39"). InnerVolumeSpecName "host-kubelet". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.079092 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/7095aa7a-d067-4977-bdc5-3a45a52a6a39-host-run-netns\") pod \"7095aa7a-d067-4977-bdc5-3a45a52a6a39\" (UID: \"7095aa7a-d067-4977-bdc5-3a45a52a6a39\") " Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.079170 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/7095aa7a-d067-4977-bdc5-3a45a52a6a39-host-slash\") pod \"7095aa7a-d067-4977-bdc5-3a45a52a6a39\" (UID: \"7095aa7a-d067-4977-bdc5-3a45a52a6a39\") " Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.079204 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/7095aa7a-d067-4977-bdc5-3a45a52a6a39-run-systemd\") pod \"7095aa7a-d067-4977-bdc5-3a45a52a6a39\" (UID: \"7095aa7a-d067-4977-bdc5-3a45a52a6a39\") " Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.079236 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5bntn\" (UniqueName: \"kubernetes.io/projected/7095aa7a-d067-4977-bdc5-3a45a52a6a39-kube-api-access-5bntn\") pod \"7095aa7a-d067-4977-bdc5-3a45a52a6a39\" (UID: \"7095aa7a-d067-4977-bdc5-3a45a52a6a39\") " Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.079280 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/7095aa7a-d067-4977-bdc5-3a45a52a6a39-host-slash" (OuterVolumeSpecName: "host-slash") pod "7095aa7a-d067-4977-bdc5-3a45a52a6a39" (UID: "7095aa7a-d067-4977-bdc5-3a45a52a6a39"). InnerVolumeSpecName "host-slash". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.079297 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/7095aa7a-d067-4977-bdc5-3a45a52a6a39-ovnkube-config\") pod \"7095aa7a-d067-4977-bdc5-3a45a52a6a39\" (UID: \"7095aa7a-d067-4977-bdc5-3a45a52a6a39\") " Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.079365 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/7095aa7a-d067-4977-bdc5-3a45a52a6a39-log-socket\") pod \"7095aa7a-d067-4977-bdc5-3a45a52a6a39\" (UID: \"7095aa7a-d067-4977-bdc5-3a45a52a6a39\") " Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.079422 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/7095aa7a-d067-4977-bdc5-3a45a52a6a39-host-cni-netd\") pod \"7095aa7a-d067-4977-bdc5-3a45a52a6a39\" (UID: \"7095aa7a-d067-4977-bdc5-3a45a52a6a39\") " Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.079439 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/7095aa7a-d067-4977-bdc5-3a45a52a6a39-systemd-units\") pod \"7095aa7a-d067-4977-bdc5-3a45a52a6a39\" (UID: \"7095aa7a-d067-4977-bdc5-3a45a52a6a39\") " Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.079460 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/7095aa7a-d067-4977-bdc5-3a45a52a6a39-run-ovn\") pod \"7095aa7a-d067-4977-bdc5-3a45a52a6a39\" (UID: \"7095aa7a-d067-4977-bdc5-3a45a52a6a39\") " Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.079450 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/7095aa7a-d067-4977-bdc5-3a45a52a6a39-log-socket" (OuterVolumeSpecName: "log-socket") pod "7095aa7a-d067-4977-bdc5-3a45a52a6a39" (UID: "7095aa7a-d067-4977-bdc5-3a45a52a6a39"). InnerVolumeSpecName "log-socket". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.079503 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/7095aa7a-d067-4977-bdc5-3a45a52a6a39-host-var-lib-cni-networks-ovn-kubernetes\") pod \"7095aa7a-d067-4977-bdc5-3a45a52a6a39\" (UID: \"7095aa7a-d067-4977-bdc5-3a45a52a6a39\") " Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.079507 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/7095aa7a-d067-4977-bdc5-3a45a52a6a39-host-cni-netd" (OuterVolumeSpecName: "host-cni-netd") pod "7095aa7a-d067-4977-bdc5-3a45a52a6a39" (UID: "7095aa7a-d067-4977-bdc5-3a45a52a6a39"). InnerVolumeSpecName "host-cni-netd". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.079527 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/7095aa7a-d067-4977-bdc5-3a45a52a6a39-systemd-units" (OuterVolumeSpecName: "systemd-units") pod "7095aa7a-d067-4977-bdc5-3a45a52a6a39" (UID: "7095aa7a-d067-4977-bdc5-3a45a52a6a39"). InnerVolumeSpecName "systemd-units". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.079523 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/7095aa7a-d067-4977-bdc5-3a45a52a6a39-host-cni-bin\") pod \"7095aa7a-d067-4977-bdc5-3a45a52a6a39\" (UID: \"7095aa7a-d067-4977-bdc5-3a45a52a6a39\") " Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.079564 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/7095aa7a-d067-4977-bdc5-3a45a52a6a39-host-var-lib-cni-networks-ovn-kubernetes" (OuterVolumeSpecName: "host-var-lib-cni-networks-ovn-kubernetes") pod "7095aa7a-d067-4977-bdc5-3a45a52a6a39" (UID: "7095aa7a-d067-4977-bdc5-3a45a52a6a39"). InnerVolumeSpecName "host-var-lib-cni-networks-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.079552 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/7095aa7a-d067-4977-bdc5-3a45a52a6a39-run-ovn" (OuterVolumeSpecName: "run-ovn") pod "7095aa7a-d067-4977-bdc5-3a45a52a6a39" (UID: "7095aa7a-d067-4977-bdc5-3a45a52a6a39"). InnerVolumeSpecName "run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.079539 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/7095aa7a-d067-4977-bdc5-3a45a52a6a39-host-cni-bin" (OuterVolumeSpecName: "host-cni-bin") pod "7095aa7a-d067-4977-bdc5-3a45a52a6a39" (UID: "7095aa7a-d067-4977-bdc5-3a45a52a6a39"). InnerVolumeSpecName "host-cni-bin". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.079623 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/7095aa7a-d067-4977-bdc5-3a45a52a6a39-ovn-node-metrics-cert\") pod \"7095aa7a-d067-4977-bdc5-3a45a52a6a39\" (UID: \"7095aa7a-d067-4977-bdc5-3a45a52a6a39\") " Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.079655 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/7095aa7a-d067-4977-bdc5-3a45a52a6a39-run-openvswitch\") pod \"7095aa7a-d067-4977-bdc5-3a45a52a6a39\" (UID: \"7095aa7a-d067-4977-bdc5-3a45a52a6a39\") " Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.079677 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/7095aa7a-d067-4977-bdc5-3a45a52a6a39-var-lib-openvswitch\") pod \"7095aa7a-d067-4977-bdc5-3a45a52a6a39\" (UID: \"7095aa7a-d067-4977-bdc5-3a45a52a6a39\") " Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.079696 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/7095aa7a-d067-4977-bdc5-3a45a52a6a39-env-overrides\") pod \"7095aa7a-d067-4977-bdc5-3a45a52a6a39\" (UID: \"7095aa7a-d067-4977-bdc5-3a45a52a6a39\") " Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.079718 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/7095aa7a-d067-4977-bdc5-3a45a52a6a39-ovnkube-script-lib\") pod \"7095aa7a-d067-4977-bdc5-3a45a52a6a39\" (UID: 
\"7095aa7a-d067-4977-bdc5-3a45a52a6a39\") " Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.079747 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/7095aa7a-d067-4977-bdc5-3a45a52a6a39-etc-openvswitch\") pod \"7095aa7a-d067-4977-bdc5-3a45a52a6a39\" (UID: \"7095aa7a-d067-4977-bdc5-3a45a52a6a39\") " Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.079766 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/7095aa7a-d067-4977-bdc5-3a45a52a6a39-host-run-ovn-kubernetes\") pod \"7095aa7a-d067-4977-bdc5-3a45a52a6a39\" (UID: \"7095aa7a-d067-4977-bdc5-3a45a52a6a39\") " Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.079722 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/7095aa7a-d067-4977-bdc5-3a45a52a6a39-run-openvswitch" (OuterVolumeSpecName: "run-openvswitch") pod "7095aa7a-d067-4977-bdc5-3a45a52a6a39" (UID: "7095aa7a-d067-4977-bdc5-3a45a52a6a39"). InnerVolumeSpecName "run-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.079767 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/7095aa7a-d067-4977-bdc5-3a45a52a6a39-var-lib-openvswitch" (OuterVolumeSpecName: "var-lib-openvswitch") pod "7095aa7a-d067-4977-bdc5-3a45a52a6a39" (UID: "7095aa7a-d067-4977-bdc5-3a45a52a6a39"). InnerVolumeSpecName "var-lib-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.079841 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/7095aa7a-d067-4977-bdc5-3a45a52a6a39-etc-openvswitch" (OuterVolumeSpecName: "etc-openvswitch") pod "7095aa7a-d067-4977-bdc5-3a45a52a6a39" (UID: "7095aa7a-d067-4977-bdc5-3a45a52a6a39"). InnerVolumeSpecName "etc-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.079914 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/7095aa7a-d067-4977-bdc5-3a45a52a6a39-host-run-ovn-kubernetes" (OuterVolumeSpecName: "host-run-ovn-kubernetes") pod "7095aa7a-d067-4977-bdc5-3a45a52a6a39" (UID: "7095aa7a-d067-4977-bdc5-3a45a52a6a39"). InnerVolumeSpecName "host-run-ovn-kubernetes". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.079922 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/d64c51bd-c4d1-44a3-8e4c-d5df0a5511d6-ovnkube-script-lib\") pod \"ovnkube-node-4lk28\" (UID: \"d64c51bd-c4d1-44a3-8e4c-d5df0a5511d6\") " pod="openshift-ovn-kubernetes/ovnkube-node-4lk28" Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.079984 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/d64c51bd-c4d1-44a3-8e4c-d5df0a5511d6-ovnkube-config\") pod \"ovnkube-node-4lk28\" (UID: \"d64c51bd-c4d1-44a3-8e4c-d5df0a5511d6\") " pod="openshift-ovn-kubernetes/ovnkube-node-4lk28" Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.080018 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-65stw\" (UniqueName: \"kubernetes.io/projected/d64c51bd-c4d1-44a3-8e4c-d5df0a5511d6-kube-api-access-65stw\") pod \"ovnkube-node-4lk28\" (UID: \"d64c51bd-c4d1-44a3-8e4c-d5df0a5511d6\") " pod="openshift-ovn-kubernetes/ovnkube-node-4lk28" Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.080046 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/d64c51bd-c4d1-44a3-8e4c-d5df0a5511d6-ovn-node-metrics-cert\") pod \"ovnkube-node-4lk28\" (UID: \"d64c51bd-c4d1-44a3-8e4c-d5df0a5511d6\") " pod="openshift-ovn-kubernetes/ovnkube-node-4lk28" Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.080141 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/d64c51bd-c4d1-44a3-8e4c-d5df0a5511d6-etc-openvswitch\") pod \"ovnkube-node-4lk28\" (UID: \"d64c51bd-c4d1-44a3-8e4c-d5df0a5511d6\") " pod="openshift-ovn-kubernetes/ovnkube-node-4lk28" Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.080165 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/d64c51bd-c4d1-44a3-8e4c-d5df0a5511d6-host-run-ovn-kubernetes\") pod \"ovnkube-node-4lk28\" (UID: \"d64c51bd-c4d1-44a3-8e4c-d5df0a5511d6\") " pod="openshift-ovn-kubernetes/ovnkube-node-4lk28" Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.080186 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7095aa7a-d067-4977-bdc5-3a45a52a6a39-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "7095aa7a-d067-4977-bdc5-3a45a52a6a39" (UID: "7095aa7a-d067-4977-bdc5-3a45a52a6a39"). InnerVolumeSpecName "env-overrides". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.080190 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/d64c51bd-c4d1-44a3-8e4c-d5df0a5511d6-host-kubelet\") pod \"ovnkube-node-4lk28\" (UID: \"d64c51bd-c4d1-44a3-8e4c-d5df0a5511d6\") " pod="openshift-ovn-kubernetes/ovnkube-node-4lk28" Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.080214 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7095aa7a-d067-4977-bdc5-3a45a52a6a39-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "7095aa7a-d067-4977-bdc5-3a45a52a6a39" (UID: "7095aa7a-d067-4977-bdc5-3a45a52a6a39"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.080227 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/d64c51bd-c4d1-44a3-8e4c-d5df0a5511d6-host-kubelet\") pod \"ovnkube-node-4lk28\" (UID: \"d64c51bd-c4d1-44a3-8e4c-d5df0a5511d6\") " pod="openshift-ovn-kubernetes/ovnkube-node-4lk28" Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.080263 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d64c51bd-c4d1-44a3-8e4c-d5df0a5511d6-host-slash\") pod \"ovnkube-node-4lk28\" (UID: \"d64c51bd-c4d1-44a3-8e4c-d5df0a5511d6\") " pod="openshift-ovn-kubernetes/ovnkube-node-4lk28" Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.080323 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/d64c51bd-c4d1-44a3-8e4c-d5df0a5511d6-host-run-ovn-kubernetes\") pod \"ovnkube-node-4lk28\" (UID: \"d64c51bd-c4d1-44a3-8e4c-d5df0a5511d6\") " pod="openshift-ovn-kubernetes/ovnkube-node-4lk28" Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.080368 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/d64c51bd-c4d1-44a3-8e4c-d5df0a5511d6-etc-openvswitch\") pod \"ovnkube-node-4lk28\" (UID: \"d64c51bd-c4d1-44a3-8e4c-d5df0a5511d6\") " pod="openshift-ovn-kubernetes/ovnkube-node-4lk28" Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.080227 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d64c51bd-c4d1-44a3-8e4c-d5df0a5511d6-host-slash\") pod \"ovnkube-node-4lk28\" (UID: \"d64c51bd-c4d1-44a3-8e4c-d5df0a5511d6\") " pod="openshift-ovn-kubernetes/ovnkube-node-4lk28" Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.080423 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/d64c51bd-c4d1-44a3-8e4c-d5df0a5511d6-node-log\") pod \"ovnkube-node-4lk28\" (UID: \"d64c51bd-c4d1-44a3-8e4c-d5df0a5511d6\") " pod="openshift-ovn-kubernetes/ovnkube-node-4lk28" Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.080439 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/d64c51bd-c4d1-44a3-8e4c-d5df0a5511d6-env-overrides\") pod \"ovnkube-node-4lk28\" (UID: \"d64c51bd-c4d1-44a3-8e4c-d5df0a5511d6\") " pod="openshift-ovn-kubernetes/ovnkube-node-4lk28" Sep 30 12:30:37 crc 
kubenswrapper[5002]: I0930 12:30:37.080465 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/d64c51bd-c4d1-44a3-8e4c-d5df0a5511d6-host-run-netns\") pod \"ovnkube-node-4lk28\" (UID: \"d64c51bd-c4d1-44a3-8e4c-d5df0a5511d6\") " pod="openshift-ovn-kubernetes/ovnkube-node-4lk28" Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.080517 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/d64c51bd-c4d1-44a3-8e4c-d5df0a5511d6-run-ovn\") pod \"ovnkube-node-4lk28\" (UID: \"d64c51bd-c4d1-44a3-8e4c-d5df0a5511d6\") " pod="openshift-ovn-kubernetes/ovnkube-node-4lk28" Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.080549 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/d64c51bd-c4d1-44a3-8e4c-d5df0a5511d6-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-4lk28\" (UID: \"d64c51bd-c4d1-44a3-8e4c-d5df0a5511d6\") " pod="openshift-ovn-kubernetes/ovnkube-node-4lk28" Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.080590 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/d64c51bd-c4d1-44a3-8e4c-d5df0a5511d6-run-systemd\") pod \"ovnkube-node-4lk28\" (UID: \"d64c51bd-c4d1-44a3-8e4c-d5df0a5511d6\") " pod="openshift-ovn-kubernetes/ovnkube-node-4lk28" Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.080609 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/d64c51bd-c4d1-44a3-8e4c-d5df0a5511d6-log-socket\") pod \"ovnkube-node-4lk28\" (UID: \"d64c51bd-c4d1-44a3-8e4c-d5df0a5511d6\") " pod="openshift-ovn-kubernetes/ovnkube-node-4lk28" Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.080616 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/7095aa7a-d067-4977-bdc5-3a45a52a6a39-host-run-netns" (OuterVolumeSpecName: "host-run-netns") pod "7095aa7a-d067-4977-bdc5-3a45a52a6a39" (UID: "7095aa7a-d067-4977-bdc5-3a45a52a6a39"). InnerVolumeSpecName "host-run-netns". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.080641 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/d64c51bd-c4d1-44a3-8e4c-d5df0a5511d6-systemd-units\") pod \"ovnkube-node-4lk28\" (UID: \"d64c51bd-c4d1-44a3-8e4c-d5df0a5511d6\") " pod="openshift-ovn-kubernetes/ovnkube-node-4lk28" Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.080697 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/d64c51bd-c4d1-44a3-8e4c-d5df0a5511d6-host-cni-netd\") pod \"ovnkube-node-4lk28\" (UID: \"d64c51bd-c4d1-44a3-8e4c-d5df0a5511d6\") " pod="openshift-ovn-kubernetes/ovnkube-node-4lk28" Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.080723 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/d64c51bd-c4d1-44a3-8e4c-d5df0a5511d6-var-lib-openvswitch\") pod \"ovnkube-node-4lk28\" (UID: \"d64c51bd-c4d1-44a3-8e4c-d5df0a5511d6\") " pod="openshift-ovn-kubernetes/ovnkube-node-4lk28" Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.080744 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/d64c51bd-c4d1-44a3-8e4c-d5df0a5511d6-run-openvswitch\") pod \"ovnkube-node-4lk28\" (UID: \"d64c51bd-c4d1-44a3-8e4c-d5df0a5511d6\") " pod="openshift-ovn-kubernetes/ovnkube-node-4lk28" Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.080768 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/d64c51bd-c4d1-44a3-8e4c-d5df0a5511d6-host-cni-bin\") pod \"ovnkube-node-4lk28\" (UID: \"d64c51bd-c4d1-44a3-8e4c-d5df0a5511d6\") " pod="openshift-ovn-kubernetes/ovnkube-node-4lk28" Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.080825 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/d64c51bd-c4d1-44a3-8e4c-d5df0a5511d6-ovnkube-script-lib\") pod \"ovnkube-node-4lk28\" (UID: \"d64c51bd-c4d1-44a3-8e4c-d5df0a5511d6\") " pod="openshift-ovn-kubernetes/ovnkube-node-4lk28" Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.080841 5002 reconciler_common.go:293] "Volume detached for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/7095aa7a-d067-4977-bdc5-3a45a52a6a39-log-socket\") on node \"crc\" DevicePath \"\"" Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.080869 5002 reconciler_common.go:293] "Volume detached for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/7095aa7a-d067-4977-bdc5-3a45a52a6a39-host-cni-netd\") on node \"crc\" DevicePath \"\"" Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.080886 5002 reconciler_common.go:293] "Volume detached for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/7095aa7a-d067-4977-bdc5-3a45a52a6a39-systemd-units\") on node \"crc\" DevicePath \"\"" Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.080893 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/d64c51bd-c4d1-44a3-8e4c-d5df0a5511d6-node-log\") pod \"ovnkube-node-4lk28\" (UID: \"d64c51bd-c4d1-44a3-8e4c-d5df0a5511d6\") " pod="openshift-ovn-kubernetes/ovnkube-node-4lk28" Sep 30 12:30:37 crc 
kubenswrapper[5002]: I0930 12:30:37.080931 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/d64c51bd-c4d1-44a3-8e4c-d5df0a5511d6-log-socket\") pod \"ovnkube-node-4lk28\" (UID: \"d64c51bd-c4d1-44a3-8e4c-d5df0a5511d6\") " pod="openshift-ovn-kubernetes/ovnkube-node-4lk28" Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.080872 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/d64c51bd-c4d1-44a3-8e4c-d5df0a5511d6-host-cni-bin\") pod \"ovnkube-node-4lk28\" (UID: \"d64c51bd-c4d1-44a3-8e4c-d5df0a5511d6\") " pod="openshift-ovn-kubernetes/ovnkube-node-4lk28" Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.080944 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/d64c51bd-c4d1-44a3-8e4c-d5df0a5511d6-run-ovn\") pod \"ovnkube-node-4lk28\" (UID: \"d64c51bd-c4d1-44a3-8e4c-d5df0a5511d6\") " pod="openshift-ovn-kubernetes/ovnkube-node-4lk28" Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.080953 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/d64c51bd-c4d1-44a3-8e4c-d5df0a5511d6-ovnkube-config\") pod \"ovnkube-node-4lk28\" (UID: \"d64c51bd-c4d1-44a3-8e4c-d5df0a5511d6\") " pod="openshift-ovn-kubernetes/ovnkube-node-4lk28" Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.080974 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/d64c51bd-c4d1-44a3-8e4c-d5df0a5511d6-systemd-units\") pod \"ovnkube-node-4lk28\" (UID: \"d64c51bd-c4d1-44a3-8e4c-d5df0a5511d6\") " pod="openshift-ovn-kubernetes/ovnkube-node-4lk28" Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.080915 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7095aa7a-d067-4977-bdc5-3a45a52a6a39-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "7095aa7a-d067-4977-bdc5-3a45a52a6a39" (UID: "7095aa7a-d067-4977-bdc5-3a45a52a6a39"). InnerVolumeSpecName "ovnkube-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.081008 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/d64c51bd-c4d1-44a3-8e4c-d5df0a5511d6-host-run-netns\") pod \"ovnkube-node-4lk28\" (UID: \"d64c51bd-c4d1-44a3-8e4c-d5df0a5511d6\") " pod="openshift-ovn-kubernetes/ovnkube-node-4lk28" Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.081013 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/d64c51bd-c4d1-44a3-8e4c-d5df0a5511d6-var-lib-openvswitch\") pod \"ovnkube-node-4lk28\" (UID: \"d64c51bd-c4d1-44a3-8e4c-d5df0a5511d6\") " pod="openshift-ovn-kubernetes/ovnkube-node-4lk28" Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.081043 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/d64c51bd-c4d1-44a3-8e4c-d5df0a5511d6-host-cni-netd\") pod \"ovnkube-node-4lk28\" (UID: \"d64c51bd-c4d1-44a3-8e4c-d5df0a5511d6\") " pod="openshift-ovn-kubernetes/ovnkube-node-4lk28" Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.081046 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/d64c51bd-c4d1-44a3-8e4c-d5df0a5511d6-run-openvswitch\") pod \"ovnkube-node-4lk28\" (UID: \"d64c51bd-c4d1-44a3-8e4c-d5df0a5511d6\") " pod="openshift-ovn-kubernetes/ovnkube-node-4lk28" Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.080899 5002 reconciler_common.go:293] "Volume detached for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/7095aa7a-d067-4977-bdc5-3a45a52a6a39-run-ovn\") on node \"crc\" DevicePath \"\"" Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.081076 5002 reconciler_common.go:293] "Volume detached for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/7095aa7a-d067-4977-bdc5-3a45a52a6a39-host-var-lib-cni-networks-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.081082 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/d64c51bd-c4d1-44a3-8e4c-d5df0a5511d6-run-systemd\") pod \"ovnkube-node-4lk28\" (UID: \"d64c51bd-c4d1-44a3-8e4c-d5df0a5511d6\") " pod="openshift-ovn-kubernetes/ovnkube-node-4lk28" Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.081097 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/d64c51bd-c4d1-44a3-8e4c-d5df0a5511d6-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-4lk28\" (UID: \"d64c51bd-c4d1-44a3-8e4c-d5df0a5511d6\") " pod="openshift-ovn-kubernetes/ovnkube-node-4lk28" Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.081116 5002 reconciler_common.go:293] "Volume detached for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/7095aa7a-d067-4977-bdc5-3a45a52a6a39-host-cni-bin\") on node \"crc\" DevicePath \"\"" Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.081128 5002 reconciler_common.go:293] "Volume detached for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/7095aa7a-d067-4977-bdc5-3a45a52a6a39-run-openvswitch\") on node \"crc\" DevicePath \"\"" Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.081138 5002 
reconciler_common.go:293] "Volume detached for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/7095aa7a-d067-4977-bdc5-3a45a52a6a39-var-lib-openvswitch\") on node \"crc\" DevicePath \"\"" Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.081148 5002 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/7095aa7a-d067-4977-bdc5-3a45a52a6a39-env-overrides\") on node \"crc\" DevicePath \"\"" Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.081177 5002 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/7095aa7a-d067-4977-bdc5-3a45a52a6a39-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.081199 5002 reconciler_common.go:293] "Volume detached for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/7095aa7a-d067-4977-bdc5-3a45a52a6a39-etc-openvswitch\") on node \"crc\" DevicePath \"\"" Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.081213 5002 reconciler_common.go:293] "Volume detached for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/7095aa7a-d067-4977-bdc5-3a45a52a6a39-host-run-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.081225 5002 reconciler_common.go:293] "Volume detached for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/7095aa7a-d067-4977-bdc5-3a45a52a6a39-node-log\") on node \"crc\" DevicePath \"\"" Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.081234 5002 reconciler_common.go:293] "Volume detached for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/7095aa7a-d067-4977-bdc5-3a45a52a6a39-host-kubelet\") on node \"crc\" DevicePath \"\"" Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.081242 5002 reconciler_common.go:293] "Volume detached for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/7095aa7a-d067-4977-bdc5-3a45a52a6a39-host-run-netns\") on node \"crc\" DevicePath \"\"" Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.081249 5002 reconciler_common.go:293] "Volume detached for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/7095aa7a-d067-4977-bdc5-3a45a52a6a39-host-slash\") on node \"crc\" DevicePath \"\"" Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.081304 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/d64c51bd-c4d1-44a3-8e4c-d5df0a5511d6-env-overrides\") pod \"ovnkube-node-4lk28\" (UID: \"d64c51bd-c4d1-44a3-8e4c-d5df0a5511d6\") " pod="openshift-ovn-kubernetes/ovnkube-node-4lk28" Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.085089 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7095aa7a-d067-4977-bdc5-3a45a52a6a39-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "7095aa7a-d067-4977-bdc5-3a45a52a6a39" (UID: "7095aa7a-d067-4977-bdc5-3a45a52a6a39"). InnerVolumeSpecName "ovn-node-metrics-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.086405 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/d64c51bd-c4d1-44a3-8e4c-d5df0a5511d6-ovn-node-metrics-cert\") pod \"ovnkube-node-4lk28\" (UID: \"d64c51bd-c4d1-44a3-8e4c-d5df0a5511d6\") " pod="openshift-ovn-kubernetes/ovnkube-node-4lk28" Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.089152 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7095aa7a-d067-4977-bdc5-3a45a52a6a39-kube-api-access-5bntn" (OuterVolumeSpecName: "kube-api-access-5bntn") pod "7095aa7a-d067-4977-bdc5-3a45a52a6a39" (UID: "7095aa7a-d067-4977-bdc5-3a45a52a6a39"). InnerVolumeSpecName "kube-api-access-5bntn". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.091544 5002 scope.go:117] "RemoveContainer" containerID="d41b04f928f3b4423bf4aa1babfd6292b749752d31ac57771b36bed503312d3f" Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.094417 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/7095aa7a-d067-4977-bdc5-3a45a52a6a39-run-systemd" (OuterVolumeSpecName: "run-systemd") pod "7095aa7a-d067-4977-bdc5-3a45a52a6a39" (UID: "7095aa7a-d067-4977-bdc5-3a45a52a6a39"). InnerVolumeSpecName "run-systemd". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.095360 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-65stw\" (UniqueName: \"kubernetes.io/projected/d64c51bd-c4d1-44a3-8e4c-d5df0a5511d6-kube-api-access-65stw\") pod \"ovnkube-node-4lk28\" (UID: \"d64c51bd-c4d1-44a3-8e4c-d5df0a5511d6\") " pod="openshift-ovn-kubernetes/ovnkube-node-4lk28" Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.109159 5002 scope.go:117] "RemoveContainer" containerID="ceb946c94ca247c05c57001754a73122b1bb98a98d886df9d64b2e4a003a4cf6" Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.125558 5002 scope.go:117] "RemoveContainer" containerID="89dbc048c1483376e11a6754384e9ea154767a88f7dfb3463809a2554bb142b9" Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.143806 5002 scope.go:117] "RemoveContainer" containerID="8b95267743f525eb5c8712304a1dc7afe4403b2065c0a3b594d95dcba6348170" Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.157772 5002 scope.go:117] "RemoveContainer" containerID="bfbe7b62a4f0a00896ac4608d70c4ef6dc7675e0312f85ca181fcf1087fe73e4" Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.172001 5002 scope.go:117] "RemoveContainer" containerID="a65c3c1af3f1ee07c2f5350011d1f5eb61dfe0b21ff13d7326373d65ffe603c9" Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.182193 5002 reconciler_common.go:293] "Volume detached for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/7095aa7a-d067-4977-bdc5-3a45a52a6a39-run-systemd\") on node \"crc\" DevicePath \"\"" Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.182231 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5bntn\" (UniqueName: \"kubernetes.io/projected/7095aa7a-d067-4977-bdc5-3a45a52a6a39-kube-api-access-5bntn\") on node \"crc\" DevicePath \"\"" Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.182245 5002 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: 
\"kubernetes.io/configmap/7095aa7a-d067-4977-bdc5-3a45a52a6a39-ovnkube-config\") on node \"crc\" DevicePath \"\"" Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.182258 5002 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/7095aa7a-d067-4977-bdc5-3a45a52a6a39-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.187822 5002 scope.go:117] "RemoveContainer" containerID="29ad21e0858ff157cc785f60f949fb08082938850870cc2c5198447f1e1d9253" Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.200648 5002 scope.go:117] "RemoveContainer" containerID="f2b1316880efb368c068d7979dd622f56942ad0268acb39147dcd8131884428f" Sep 30 12:30:37 crc kubenswrapper[5002]: E0930 12:30:37.201214 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f2b1316880efb368c068d7979dd622f56942ad0268acb39147dcd8131884428f\": container with ID starting with f2b1316880efb368c068d7979dd622f56942ad0268acb39147dcd8131884428f not found: ID does not exist" containerID="f2b1316880efb368c068d7979dd622f56942ad0268acb39147dcd8131884428f" Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.201243 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f2b1316880efb368c068d7979dd622f56942ad0268acb39147dcd8131884428f"} err="failed to get container status \"f2b1316880efb368c068d7979dd622f56942ad0268acb39147dcd8131884428f\": rpc error: code = NotFound desc = could not find container \"f2b1316880efb368c068d7979dd622f56942ad0268acb39147dcd8131884428f\": container with ID starting with f2b1316880efb368c068d7979dd622f56942ad0268acb39147dcd8131884428f not found: ID does not exist" Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.201268 5002 scope.go:117] "RemoveContainer" containerID="9a9a8ff35878c8516cf03a6425a099adbb9e94d1feae510064c59f89b051fcb5" Sep 30 12:30:37 crc kubenswrapper[5002]: E0930 12:30:37.201571 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9a9a8ff35878c8516cf03a6425a099adbb9e94d1feae510064c59f89b051fcb5\": container with ID starting with 9a9a8ff35878c8516cf03a6425a099adbb9e94d1feae510064c59f89b051fcb5 not found: ID does not exist" containerID="9a9a8ff35878c8516cf03a6425a099adbb9e94d1feae510064c59f89b051fcb5" Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.201678 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9a9a8ff35878c8516cf03a6425a099adbb9e94d1feae510064c59f89b051fcb5"} err="failed to get container status \"9a9a8ff35878c8516cf03a6425a099adbb9e94d1feae510064c59f89b051fcb5\": rpc error: code = NotFound desc = could not find container \"9a9a8ff35878c8516cf03a6425a099adbb9e94d1feae510064c59f89b051fcb5\": container with ID starting with 9a9a8ff35878c8516cf03a6425a099adbb9e94d1feae510064c59f89b051fcb5 not found: ID does not exist" Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.201712 5002 scope.go:117] "RemoveContainer" containerID="6ce6227a7ac3ced1fbd4814039859e25ff52f314aa8479275f1bfc49b04cf411" Sep 30 12:30:37 crc kubenswrapper[5002]: E0930 12:30:37.202058 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6ce6227a7ac3ced1fbd4814039859e25ff52f314aa8479275f1bfc49b04cf411\": container with ID starting with 
6ce6227a7ac3ced1fbd4814039859e25ff52f314aa8479275f1bfc49b04cf411 not found: ID does not exist" containerID="6ce6227a7ac3ced1fbd4814039859e25ff52f314aa8479275f1bfc49b04cf411" Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.202089 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6ce6227a7ac3ced1fbd4814039859e25ff52f314aa8479275f1bfc49b04cf411"} err="failed to get container status \"6ce6227a7ac3ced1fbd4814039859e25ff52f314aa8479275f1bfc49b04cf411\": rpc error: code = NotFound desc = could not find container \"6ce6227a7ac3ced1fbd4814039859e25ff52f314aa8479275f1bfc49b04cf411\": container with ID starting with 6ce6227a7ac3ced1fbd4814039859e25ff52f314aa8479275f1bfc49b04cf411 not found: ID does not exist" Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.202107 5002 scope.go:117] "RemoveContainer" containerID="d41b04f928f3b4423bf4aa1babfd6292b749752d31ac57771b36bed503312d3f" Sep 30 12:30:37 crc kubenswrapper[5002]: E0930 12:30:37.202327 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d41b04f928f3b4423bf4aa1babfd6292b749752d31ac57771b36bed503312d3f\": container with ID starting with d41b04f928f3b4423bf4aa1babfd6292b749752d31ac57771b36bed503312d3f not found: ID does not exist" containerID="d41b04f928f3b4423bf4aa1babfd6292b749752d31ac57771b36bed503312d3f" Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.202380 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d41b04f928f3b4423bf4aa1babfd6292b749752d31ac57771b36bed503312d3f"} err="failed to get container status \"d41b04f928f3b4423bf4aa1babfd6292b749752d31ac57771b36bed503312d3f\": rpc error: code = NotFound desc = could not find container \"d41b04f928f3b4423bf4aa1babfd6292b749752d31ac57771b36bed503312d3f\": container with ID starting with d41b04f928f3b4423bf4aa1babfd6292b749752d31ac57771b36bed503312d3f not found: ID does not exist" Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.202397 5002 scope.go:117] "RemoveContainer" containerID="ceb946c94ca247c05c57001754a73122b1bb98a98d886df9d64b2e4a003a4cf6" Sep 30 12:30:37 crc kubenswrapper[5002]: E0930 12:30:37.202919 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ceb946c94ca247c05c57001754a73122b1bb98a98d886df9d64b2e4a003a4cf6\": container with ID starting with ceb946c94ca247c05c57001754a73122b1bb98a98d886df9d64b2e4a003a4cf6 not found: ID does not exist" containerID="ceb946c94ca247c05c57001754a73122b1bb98a98d886df9d64b2e4a003a4cf6" Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.202942 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ceb946c94ca247c05c57001754a73122b1bb98a98d886df9d64b2e4a003a4cf6"} err="failed to get container status \"ceb946c94ca247c05c57001754a73122b1bb98a98d886df9d64b2e4a003a4cf6\": rpc error: code = NotFound desc = could not find container \"ceb946c94ca247c05c57001754a73122b1bb98a98d886df9d64b2e4a003a4cf6\": container with ID starting with ceb946c94ca247c05c57001754a73122b1bb98a98d886df9d64b2e4a003a4cf6 not found: ID does not exist" Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.202957 5002 scope.go:117] "RemoveContainer" containerID="89dbc048c1483376e11a6754384e9ea154767a88f7dfb3463809a2554bb142b9" Sep 30 12:30:37 crc kubenswrapper[5002]: E0930 12:30:37.203224 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc 
error: code = NotFound desc = could not find container \"89dbc048c1483376e11a6754384e9ea154767a88f7dfb3463809a2554bb142b9\": container with ID starting with 89dbc048c1483376e11a6754384e9ea154767a88f7dfb3463809a2554bb142b9 not found: ID does not exist" containerID="89dbc048c1483376e11a6754384e9ea154767a88f7dfb3463809a2554bb142b9" Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.203258 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"89dbc048c1483376e11a6754384e9ea154767a88f7dfb3463809a2554bb142b9"} err="failed to get container status \"89dbc048c1483376e11a6754384e9ea154767a88f7dfb3463809a2554bb142b9\": rpc error: code = NotFound desc = could not find container \"89dbc048c1483376e11a6754384e9ea154767a88f7dfb3463809a2554bb142b9\": container with ID starting with 89dbc048c1483376e11a6754384e9ea154767a88f7dfb3463809a2554bb142b9 not found: ID does not exist" Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.203275 5002 scope.go:117] "RemoveContainer" containerID="8b95267743f525eb5c8712304a1dc7afe4403b2065c0a3b594d95dcba6348170" Sep 30 12:30:37 crc kubenswrapper[5002]: E0930 12:30:37.203534 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8b95267743f525eb5c8712304a1dc7afe4403b2065c0a3b594d95dcba6348170\": container with ID starting with 8b95267743f525eb5c8712304a1dc7afe4403b2065c0a3b594d95dcba6348170 not found: ID does not exist" containerID="8b95267743f525eb5c8712304a1dc7afe4403b2065c0a3b594d95dcba6348170" Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.203587 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8b95267743f525eb5c8712304a1dc7afe4403b2065c0a3b594d95dcba6348170"} err="failed to get container status \"8b95267743f525eb5c8712304a1dc7afe4403b2065c0a3b594d95dcba6348170\": rpc error: code = NotFound desc = could not find container \"8b95267743f525eb5c8712304a1dc7afe4403b2065c0a3b594d95dcba6348170\": container with ID starting with 8b95267743f525eb5c8712304a1dc7afe4403b2065c0a3b594d95dcba6348170 not found: ID does not exist" Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.203606 5002 scope.go:117] "RemoveContainer" containerID="bfbe7b62a4f0a00896ac4608d70c4ef6dc7675e0312f85ca181fcf1087fe73e4" Sep 30 12:30:37 crc kubenswrapper[5002]: E0930 12:30:37.203878 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bfbe7b62a4f0a00896ac4608d70c4ef6dc7675e0312f85ca181fcf1087fe73e4\": container with ID starting with bfbe7b62a4f0a00896ac4608d70c4ef6dc7675e0312f85ca181fcf1087fe73e4 not found: ID does not exist" containerID="bfbe7b62a4f0a00896ac4608d70c4ef6dc7675e0312f85ca181fcf1087fe73e4" Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.203906 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bfbe7b62a4f0a00896ac4608d70c4ef6dc7675e0312f85ca181fcf1087fe73e4"} err="failed to get container status \"bfbe7b62a4f0a00896ac4608d70c4ef6dc7675e0312f85ca181fcf1087fe73e4\": rpc error: code = NotFound desc = could not find container \"bfbe7b62a4f0a00896ac4608d70c4ef6dc7675e0312f85ca181fcf1087fe73e4\": container with ID starting with bfbe7b62a4f0a00896ac4608d70c4ef6dc7675e0312f85ca181fcf1087fe73e4 not found: ID does not exist" Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.203922 5002 scope.go:117] "RemoveContainer" 
containerID="a65c3c1af3f1ee07c2f5350011d1f5eb61dfe0b21ff13d7326373d65ffe603c9" Sep 30 12:30:37 crc kubenswrapper[5002]: E0930 12:30:37.204247 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a65c3c1af3f1ee07c2f5350011d1f5eb61dfe0b21ff13d7326373d65ffe603c9\": container with ID starting with a65c3c1af3f1ee07c2f5350011d1f5eb61dfe0b21ff13d7326373d65ffe603c9 not found: ID does not exist" containerID="a65c3c1af3f1ee07c2f5350011d1f5eb61dfe0b21ff13d7326373d65ffe603c9" Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.204274 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a65c3c1af3f1ee07c2f5350011d1f5eb61dfe0b21ff13d7326373d65ffe603c9"} err="failed to get container status \"a65c3c1af3f1ee07c2f5350011d1f5eb61dfe0b21ff13d7326373d65ffe603c9\": rpc error: code = NotFound desc = could not find container \"a65c3c1af3f1ee07c2f5350011d1f5eb61dfe0b21ff13d7326373d65ffe603c9\": container with ID starting with a65c3c1af3f1ee07c2f5350011d1f5eb61dfe0b21ff13d7326373d65ffe603c9 not found: ID does not exist" Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.204289 5002 scope.go:117] "RemoveContainer" containerID="29ad21e0858ff157cc785f60f949fb08082938850870cc2c5198447f1e1d9253" Sep 30 12:30:37 crc kubenswrapper[5002]: E0930 12:30:37.204543 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"29ad21e0858ff157cc785f60f949fb08082938850870cc2c5198447f1e1d9253\": container with ID starting with 29ad21e0858ff157cc785f60f949fb08082938850870cc2c5198447f1e1d9253 not found: ID does not exist" containerID="29ad21e0858ff157cc785f60f949fb08082938850870cc2c5198447f1e1d9253" Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.204569 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"29ad21e0858ff157cc785f60f949fb08082938850870cc2c5198447f1e1d9253"} err="failed to get container status \"29ad21e0858ff157cc785f60f949fb08082938850870cc2c5198447f1e1d9253\": rpc error: code = NotFound desc = could not find container \"29ad21e0858ff157cc785f60f949fb08082938850870cc2c5198447f1e1d9253\": container with ID starting with 29ad21e0858ff157cc785f60f949fb08082938850870cc2c5198447f1e1d9253 not found: ID does not exist" Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.204585 5002 scope.go:117] "RemoveContainer" containerID="f2b1316880efb368c068d7979dd622f56942ad0268acb39147dcd8131884428f" Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.204828 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f2b1316880efb368c068d7979dd622f56942ad0268acb39147dcd8131884428f"} err="failed to get container status \"f2b1316880efb368c068d7979dd622f56942ad0268acb39147dcd8131884428f\": rpc error: code = NotFound desc = could not find container \"f2b1316880efb368c068d7979dd622f56942ad0268acb39147dcd8131884428f\": container with ID starting with f2b1316880efb368c068d7979dd622f56942ad0268acb39147dcd8131884428f not found: ID does not exist" Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.204854 5002 scope.go:117] "RemoveContainer" containerID="9a9a8ff35878c8516cf03a6425a099adbb9e94d1feae510064c59f89b051fcb5" Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.205061 5002 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"9a9a8ff35878c8516cf03a6425a099adbb9e94d1feae510064c59f89b051fcb5"} err="failed to get container status \"9a9a8ff35878c8516cf03a6425a099adbb9e94d1feae510064c59f89b051fcb5\": rpc error: code = NotFound desc = could not find container \"9a9a8ff35878c8516cf03a6425a099adbb9e94d1feae510064c59f89b051fcb5\": container with ID starting with 9a9a8ff35878c8516cf03a6425a099adbb9e94d1feae510064c59f89b051fcb5 not found: ID does not exist" Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.205083 5002 scope.go:117] "RemoveContainer" containerID="6ce6227a7ac3ced1fbd4814039859e25ff52f314aa8479275f1bfc49b04cf411" Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.205305 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6ce6227a7ac3ced1fbd4814039859e25ff52f314aa8479275f1bfc49b04cf411"} err="failed to get container status \"6ce6227a7ac3ced1fbd4814039859e25ff52f314aa8479275f1bfc49b04cf411\": rpc error: code = NotFound desc = could not find container \"6ce6227a7ac3ced1fbd4814039859e25ff52f314aa8479275f1bfc49b04cf411\": container with ID starting with 6ce6227a7ac3ced1fbd4814039859e25ff52f314aa8479275f1bfc49b04cf411 not found: ID does not exist" Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.205329 5002 scope.go:117] "RemoveContainer" containerID="d41b04f928f3b4423bf4aa1babfd6292b749752d31ac57771b36bed503312d3f" Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.205727 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d41b04f928f3b4423bf4aa1babfd6292b749752d31ac57771b36bed503312d3f"} err="failed to get container status \"d41b04f928f3b4423bf4aa1babfd6292b749752d31ac57771b36bed503312d3f\": rpc error: code = NotFound desc = could not find container \"d41b04f928f3b4423bf4aa1babfd6292b749752d31ac57771b36bed503312d3f\": container with ID starting with d41b04f928f3b4423bf4aa1babfd6292b749752d31ac57771b36bed503312d3f not found: ID does not exist" Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.205755 5002 scope.go:117] "RemoveContainer" containerID="ceb946c94ca247c05c57001754a73122b1bb98a98d886df9d64b2e4a003a4cf6" Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.205985 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ceb946c94ca247c05c57001754a73122b1bb98a98d886df9d64b2e4a003a4cf6"} err="failed to get container status \"ceb946c94ca247c05c57001754a73122b1bb98a98d886df9d64b2e4a003a4cf6\": rpc error: code = NotFound desc = could not find container \"ceb946c94ca247c05c57001754a73122b1bb98a98d886df9d64b2e4a003a4cf6\": container with ID starting with ceb946c94ca247c05c57001754a73122b1bb98a98d886df9d64b2e4a003a4cf6 not found: ID does not exist" Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.206035 5002 scope.go:117] "RemoveContainer" containerID="89dbc048c1483376e11a6754384e9ea154767a88f7dfb3463809a2554bb142b9" Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.206265 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"89dbc048c1483376e11a6754384e9ea154767a88f7dfb3463809a2554bb142b9"} err="failed to get container status \"89dbc048c1483376e11a6754384e9ea154767a88f7dfb3463809a2554bb142b9\": rpc error: code = NotFound desc = could not find container \"89dbc048c1483376e11a6754384e9ea154767a88f7dfb3463809a2554bb142b9\": container with ID starting with 89dbc048c1483376e11a6754384e9ea154767a88f7dfb3463809a2554bb142b9 not found: ID does not exist" Sep 
30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.206309 5002 scope.go:117] "RemoveContainer" containerID="8b95267743f525eb5c8712304a1dc7afe4403b2065c0a3b594d95dcba6348170" Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.206623 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8b95267743f525eb5c8712304a1dc7afe4403b2065c0a3b594d95dcba6348170"} err="failed to get container status \"8b95267743f525eb5c8712304a1dc7afe4403b2065c0a3b594d95dcba6348170\": rpc error: code = NotFound desc = could not find container \"8b95267743f525eb5c8712304a1dc7afe4403b2065c0a3b594d95dcba6348170\": container with ID starting with 8b95267743f525eb5c8712304a1dc7afe4403b2065c0a3b594d95dcba6348170 not found: ID does not exist" Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.206669 5002 scope.go:117] "RemoveContainer" containerID="bfbe7b62a4f0a00896ac4608d70c4ef6dc7675e0312f85ca181fcf1087fe73e4" Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.206950 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bfbe7b62a4f0a00896ac4608d70c4ef6dc7675e0312f85ca181fcf1087fe73e4"} err="failed to get container status \"bfbe7b62a4f0a00896ac4608d70c4ef6dc7675e0312f85ca181fcf1087fe73e4\": rpc error: code = NotFound desc = could not find container \"bfbe7b62a4f0a00896ac4608d70c4ef6dc7675e0312f85ca181fcf1087fe73e4\": container with ID starting with bfbe7b62a4f0a00896ac4608d70c4ef6dc7675e0312f85ca181fcf1087fe73e4 not found: ID does not exist" Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.206974 5002 scope.go:117] "RemoveContainer" containerID="a65c3c1af3f1ee07c2f5350011d1f5eb61dfe0b21ff13d7326373d65ffe603c9" Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.207241 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a65c3c1af3f1ee07c2f5350011d1f5eb61dfe0b21ff13d7326373d65ffe603c9"} err="failed to get container status \"a65c3c1af3f1ee07c2f5350011d1f5eb61dfe0b21ff13d7326373d65ffe603c9\": rpc error: code = NotFound desc = could not find container \"a65c3c1af3f1ee07c2f5350011d1f5eb61dfe0b21ff13d7326373d65ffe603c9\": container with ID starting with a65c3c1af3f1ee07c2f5350011d1f5eb61dfe0b21ff13d7326373d65ffe603c9 not found: ID does not exist" Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.207269 5002 scope.go:117] "RemoveContainer" containerID="29ad21e0858ff157cc785f60f949fb08082938850870cc2c5198447f1e1d9253" Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.207522 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"29ad21e0858ff157cc785f60f949fb08082938850870cc2c5198447f1e1d9253"} err="failed to get container status \"29ad21e0858ff157cc785f60f949fb08082938850870cc2c5198447f1e1d9253\": rpc error: code = NotFound desc = could not find container \"29ad21e0858ff157cc785f60f949fb08082938850870cc2c5198447f1e1d9253\": container with ID starting with 29ad21e0858ff157cc785f60f949fb08082938850870cc2c5198447f1e1d9253 not found: ID does not exist" Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.207546 5002 scope.go:117] "RemoveContainer" containerID="f2b1316880efb368c068d7979dd622f56942ad0268acb39147dcd8131884428f" Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.207859 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f2b1316880efb368c068d7979dd622f56942ad0268acb39147dcd8131884428f"} err="failed to get container status 
\"f2b1316880efb368c068d7979dd622f56942ad0268acb39147dcd8131884428f\": rpc error: code = NotFound desc = could not find container \"f2b1316880efb368c068d7979dd622f56942ad0268acb39147dcd8131884428f\": container with ID starting with f2b1316880efb368c068d7979dd622f56942ad0268acb39147dcd8131884428f not found: ID does not exist" Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.207887 5002 scope.go:117] "RemoveContainer" containerID="9a9a8ff35878c8516cf03a6425a099adbb9e94d1feae510064c59f89b051fcb5" Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.208323 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9a9a8ff35878c8516cf03a6425a099adbb9e94d1feae510064c59f89b051fcb5"} err="failed to get container status \"9a9a8ff35878c8516cf03a6425a099adbb9e94d1feae510064c59f89b051fcb5\": rpc error: code = NotFound desc = could not find container \"9a9a8ff35878c8516cf03a6425a099adbb9e94d1feae510064c59f89b051fcb5\": container with ID starting with 9a9a8ff35878c8516cf03a6425a099adbb9e94d1feae510064c59f89b051fcb5 not found: ID does not exist" Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.208370 5002 scope.go:117] "RemoveContainer" containerID="6ce6227a7ac3ced1fbd4814039859e25ff52f314aa8479275f1bfc49b04cf411" Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.208899 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6ce6227a7ac3ced1fbd4814039859e25ff52f314aa8479275f1bfc49b04cf411"} err="failed to get container status \"6ce6227a7ac3ced1fbd4814039859e25ff52f314aa8479275f1bfc49b04cf411\": rpc error: code = NotFound desc = could not find container \"6ce6227a7ac3ced1fbd4814039859e25ff52f314aa8479275f1bfc49b04cf411\": container with ID starting with 6ce6227a7ac3ced1fbd4814039859e25ff52f314aa8479275f1bfc49b04cf411 not found: ID does not exist" Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.208926 5002 scope.go:117] "RemoveContainer" containerID="d41b04f928f3b4423bf4aa1babfd6292b749752d31ac57771b36bed503312d3f" Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.209259 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d41b04f928f3b4423bf4aa1babfd6292b749752d31ac57771b36bed503312d3f"} err="failed to get container status \"d41b04f928f3b4423bf4aa1babfd6292b749752d31ac57771b36bed503312d3f\": rpc error: code = NotFound desc = could not find container \"d41b04f928f3b4423bf4aa1babfd6292b749752d31ac57771b36bed503312d3f\": container with ID starting with d41b04f928f3b4423bf4aa1babfd6292b749752d31ac57771b36bed503312d3f not found: ID does not exist" Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.209309 5002 scope.go:117] "RemoveContainer" containerID="ceb946c94ca247c05c57001754a73122b1bb98a98d886df9d64b2e4a003a4cf6" Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.209689 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ceb946c94ca247c05c57001754a73122b1bb98a98d886df9d64b2e4a003a4cf6"} err="failed to get container status \"ceb946c94ca247c05c57001754a73122b1bb98a98d886df9d64b2e4a003a4cf6\": rpc error: code = NotFound desc = could not find container \"ceb946c94ca247c05c57001754a73122b1bb98a98d886df9d64b2e4a003a4cf6\": container with ID starting with ceb946c94ca247c05c57001754a73122b1bb98a98d886df9d64b2e4a003a4cf6 not found: ID does not exist" Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.209721 5002 scope.go:117] "RemoveContainer" 
containerID="89dbc048c1483376e11a6754384e9ea154767a88f7dfb3463809a2554bb142b9" Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.209985 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"89dbc048c1483376e11a6754384e9ea154767a88f7dfb3463809a2554bb142b9"} err="failed to get container status \"89dbc048c1483376e11a6754384e9ea154767a88f7dfb3463809a2554bb142b9\": rpc error: code = NotFound desc = could not find container \"89dbc048c1483376e11a6754384e9ea154767a88f7dfb3463809a2554bb142b9\": container with ID starting with 89dbc048c1483376e11a6754384e9ea154767a88f7dfb3463809a2554bb142b9 not found: ID does not exist" Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.210007 5002 scope.go:117] "RemoveContainer" containerID="8b95267743f525eb5c8712304a1dc7afe4403b2065c0a3b594d95dcba6348170" Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.210267 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8b95267743f525eb5c8712304a1dc7afe4403b2065c0a3b594d95dcba6348170"} err="failed to get container status \"8b95267743f525eb5c8712304a1dc7afe4403b2065c0a3b594d95dcba6348170\": rpc error: code = NotFound desc = could not find container \"8b95267743f525eb5c8712304a1dc7afe4403b2065c0a3b594d95dcba6348170\": container with ID starting with 8b95267743f525eb5c8712304a1dc7afe4403b2065c0a3b594d95dcba6348170 not found: ID does not exist" Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.210296 5002 scope.go:117] "RemoveContainer" containerID="bfbe7b62a4f0a00896ac4608d70c4ef6dc7675e0312f85ca181fcf1087fe73e4" Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.210591 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bfbe7b62a4f0a00896ac4608d70c4ef6dc7675e0312f85ca181fcf1087fe73e4"} err="failed to get container status \"bfbe7b62a4f0a00896ac4608d70c4ef6dc7675e0312f85ca181fcf1087fe73e4\": rpc error: code = NotFound desc = could not find container \"bfbe7b62a4f0a00896ac4608d70c4ef6dc7675e0312f85ca181fcf1087fe73e4\": container with ID starting with bfbe7b62a4f0a00896ac4608d70c4ef6dc7675e0312f85ca181fcf1087fe73e4 not found: ID does not exist" Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.210615 5002 scope.go:117] "RemoveContainer" containerID="a65c3c1af3f1ee07c2f5350011d1f5eb61dfe0b21ff13d7326373d65ffe603c9" Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.210880 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a65c3c1af3f1ee07c2f5350011d1f5eb61dfe0b21ff13d7326373d65ffe603c9"} err="failed to get container status \"a65c3c1af3f1ee07c2f5350011d1f5eb61dfe0b21ff13d7326373d65ffe603c9\": rpc error: code = NotFound desc = could not find container \"a65c3c1af3f1ee07c2f5350011d1f5eb61dfe0b21ff13d7326373d65ffe603c9\": container with ID starting with a65c3c1af3f1ee07c2f5350011d1f5eb61dfe0b21ff13d7326373d65ffe603c9 not found: ID does not exist" Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.210907 5002 scope.go:117] "RemoveContainer" containerID="29ad21e0858ff157cc785f60f949fb08082938850870cc2c5198447f1e1d9253" Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.211124 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"29ad21e0858ff157cc785f60f949fb08082938850870cc2c5198447f1e1d9253"} err="failed to get container status \"29ad21e0858ff157cc785f60f949fb08082938850870cc2c5198447f1e1d9253\": rpc error: code = NotFound desc = could not find 
container \"29ad21e0858ff157cc785f60f949fb08082938850870cc2c5198447f1e1d9253\": container with ID starting with 29ad21e0858ff157cc785f60f949fb08082938850870cc2c5198447f1e1d9253 not found: ID does not exist" Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.211145 5002 scope.go:117] "RemoveContainer" containerID="f2b1316880efb368c068d7979dd622f56942ad0268acb39147dcd8131884428f" Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.211380 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f2b1316880efb368c068d7979dd622f56942ad0268acb39147dcd8131884428f"} err="failed to get container status \"f2b1316880efb368c068d7979dd622f56942ad0268acb39147dcd8131884428f\": rpc error: code = NotFound desc = could not find container \"f2b1316880efb368c068d7979dd622f56942ad0268acb39147dcd8131884428f\": container with ID starting with f2b1316880efb368c068d7979dd622f56942ad0268acb39147dcd8131884428f not found: ID does not exist" Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.211404 5002 scope.go:117] "RemoveContainer" containerID="9a9a8ff35878c8516cf03a6425a099adbb9e94d1feae510064c59f89b051fcb5" Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.211641 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9a9a8ff35878c8516cf03a6425a099adbb9e94d1feae510064c59f89b051fcb5"} err="failed to get container status \"9a9a8ff35878c8516cf03a6425a099adbb9e94d1feae510064c59f89b051fcb5\": rpc error: code = NotFound desc = could not find container \"9a9a8ff35878c8516cf03a6425a099adbb9e94d1feae510064c59f89b051fcb5\": container with ID starting with 9a9a8ff35878c8516cf03a6425a099adbb9e94d1feae510064c59f89b051fcb5 not found: ID does not exist" Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.211664 5002 scope.go:117] "RemoveContainer" containerID="6ce6227a7ac3ced1fbd4814039859e25ff52f314aa8479275f1bfc49b04cf411" Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.211895 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6ce6227a7ac3ced1fbd4814039859e25ff52f314aa8479275f1bfc49b04cf411"} err="failed to get container status \"6ce6227a7ac3ced1fbd4814039859e25ff52f314aa8479275f1bfc49b04cf411\": rpc error: code = NotFound desc = could not find container \"6ce6227a7ac3ced1fbd4814039859e25ff52f314aa8479275f1bfc49b04cf411\": container with ID starting with 6ce6227a7ac3ced1fbd4814039859e25ff52f314aa8479275f1bfc49b04cf411 not found: ID does not exist" Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.211922 5002 scope.go:117] "RemoveContainer" containerID="d41b04f928f3b4423bf4aa1babfd6292b749752d31ac57771b36bed503312d3f" Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.212401 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d41b04f928f3b4423bf4aa1babfd6292b749752d31ac57771b36bed503312d3f"} err="failed to get container status \"d41b04f928f3b4423bf4aa1babfd6292b749752d31ac57771b36bed503312d3f\": rpc error: code = NotFound desc = could not find container \"d41b04f928f3b4423bf4aa1babfd6292b749752d31ac57771b36bed503312d3f\": container with ID starting with d41b04f928f3b4423bf4aa1babfd6292b749752d31ac57771b36bed503312d3f not found: ID does not exist" Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.212421 5002 scope.go:117] "RemoveContainer" containerID="ceb946c94ca247c05c57001754a73122b1bb98a98d886df9d64b2e4a003a4cf6" Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.212653 5002 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ceb946c94ca247c05c57001754a73122b1bb98a98d886df9d64b2e4a003a4cf6"} err="failed to get container status \"ceb946c94ca247c05c57001754a73122b1bb98a98d886df9d64b2e4a003a4cf6\": rpc error: code = NotFound desc = could not find container \"ceb946c94ca247c05c57001754a73122b1bb98a98d886df9d64b2e4a003a4cf6\": container with ID starting with ceb946c94ca247c05c57001754a73122b1bb98a98d886df9d64b2e4a003a4cf6 not found: ID does not exist" Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.212681 5002 scope.go:117] "RemoveContainer" containerID="89dbc048c1483376e11a6754384e9ea154767a88f7dfb3463809a2554bb142b9" Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.212863 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"89dbc048c1483376e11a6754384e9ea154767a88f7dfb3463809a2554bb142b9"} err="failed to get container status \"89dbc048c1483376e11a6754384e9ea154767a88f7dfb3463809a2554bb142b9\": rpc error: code = NotFound desc = could not find container \"89dbc048c1483376e11a6754384e9ea154767a88f7dfb3463809a2554bb142b9\": container with ID starting with 89dbc048c1483376e11a6754384e9ea154767a88f7dfb3463809a2554bb142b9 not found: ID does not exist" Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.212882 5002 scope.go:117] "RemoveContainer" containerID="8b95267743f525eb5c8712304a1dc7afe4403b2065c0a3b594d95dcba6348170" Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.213128 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8b95267743f525eb5c8712304a1dc7afe4403b2065c0a3b594d95dcba6348170"} err="failed to get container status \"8b95267743f525eb5c8712304a1dc7afe4403b2065c0a3b594d95dcba6348170\": rpc error: code = NotFound desc = could not find container \"8b95267743f525eb5c8712304a1dc7afe4403b2065c0a3b594d95dcba6348170\": container with ID starting with 8b95267743f525eb5c8712304a1dc7afe4403b2065c0a3b594d95dcba6348170 not found: ID does not exist" Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.213154 5002 scope.go:117] "RemoveContainer" containerID="bfbe7b62a4f0a00896ac4608d70c4ef6dc7675e0312f85ca181fcf1087fe73e4" Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.213344 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bfbe7b62a4f0a00896ac4608d70c4ef6dc7675e0312f85ca181fcf1087fe73e4"} err="failed to get container status \"bfbe7b62a4f0a00896ac4608d70c4ef6dc7675e0312f85ca181fcf1087fe73e4\": rpc error: code = NotFound desc = could not find container \"bfbe7b62a4f0a00896ac4608d70c4ef6dc7675e0312f85ca181fcf1087fe73e4\": container with ID starting with bfbe7b62a4f0a00896ac4608d70c4ef6dc7675e0312f85ca181fcf1087fe73e4 not found: ID does not exist" Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.213364 5002 scope.go:117] "RemoveContainer" containerID="a65c3c1af3f1ee07c2f5350011d1f5eb61dfe0b21ff13d7326373d65ffe603c9" Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.213601 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a65c3c1af3f1ee07c2f5350011d1f5eb61dfe0b21ff13d7326373d65ffe603c9"} err="failed to get container status \"a65c3c1af3f1ee07c2f5350011d1f5eb61dfe0b21ff13d7326373d65ffe603c9\": rpc error: code = NotFound desc = could not find container \"a65c3c1af3f1ee07c2f5350011d1f5eb61dfe0b21ff13d7326373d65ffe603c9\": container with ID starting with 
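[Editor's note: the NotFound loop above is benign; the containers were already gone from CRI-O, so every retried status lookup during deletion fails the same way. A common idiom for this situation, sketched below, is to fold gRPC NotFound into success for an idempotent delete. This assumes the runtime client surfaces gRPC status errors, as CRI runtimes do; it is not kubelet's actual implementation.]

```go
// Sketch: idempotent container removal that treats "already gone" as success.
package cleanup

import (
	"context"
	"fmt"

	"google.golang.org/grpc/codes"
	"google.golang.org/grpc/status"
)

// Remover stands in for the runtime client; it is a hypothetical interface,
// assumed to return gRPC status errors like the CRI runtime service.
type Remover interface {
	RemoveContainer(ctx context.Context, id string) error
}

// RemoveIfPresent deletes a container, mapping NotFound to nil: the desired
// state (container absent) already holds, so retries stay harmless.
func RemoveIfPresent(ctx context.Context, r Remover, id string) error {
	err := r.RemoveContainer(ctx, id)
	if err == nil || status.Code(err) == codes.NotFound {
		return nil
	}
	return fmt.Errorf("removing container %s: %w", id, err)
}
```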
Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.287950 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-4lk28"
Sep 30 12:30:37 crc kubenswrapper[5002]: W0930 12:30:37.313430 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd64c51bd_c4d1_44a3_8e4c_d5df0a5511d6.slice/crio-e3c86dc0849967632d0e8f4b8e3b593e43f56cfcd5aaf9728733456c73fbc3ed WatchSource:0}: Error finding container e3c86dc0849967632d0e8f4b8e3b593e43f56cfcd5aaf9728733456c73fbc3ed: Status 404 returned error can't find the container with id e3c86dc0849967632d0e8f4b8e3b593e43f56cfcd5aaf9728733456c73fbc3ed
Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.358143 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-4pvsr"]
Sep 30 12:30:37 crc kubenswrapper[5002]: I0930 12:30:37.362299 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-4pvsr"]
Sep 30 12:30:38 crc kubenswrapper[5002]: I0930 12:30:38.021720 5002 generic.go:334] "Generic (PLEG): container finished" podID="d64c51bd-c4d1-44a3-8e4c-d5df0a5511d6" containerID="6e9549542c858f604c716bd0b8da1933804e294d47252f2f0115c0b251488915" exitCode=0
Sep 30 12:30:38 crc kubenswrapper[5002]: I0930 12:30:38.021772 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4lk28" event={"ID":"d64c51bd-c4d1-44a3-8e4c-d5df0a5511d6","Type":"ContainerDied","Data":"6e9549542c858f604c716bd0b8da1933804e294d47252f2f0115c0b251488915"}
Sep 30 12:30:38 crc kubenswrapper[5002]: I0930 12:30:38.021855 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4lk28" event={"ID":"d64c51bd-c4d1-44a3-8e4c-d5df0a5511d6","Type":"ContainerStarted","Data":"e3c86dc0849967632d0e8f4b8e3b593e43f56cfcd5aaf9728733456c73fbc3ed"}
Sep 30 12:30:38 crc kubenswrapper[5002]: I0930 12:30:38.023613 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-ttfn8_2bd9f18d-bfb3-4bd7-9a87-242029cd3200/kube-multus/2.log"
Sep 30 12:30:38 crc kubenswrapper[5002]: I0930 12:30:38.691904 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7095aa7a-d067-4977-bdc5-3a45a52a6a39" path="/var/lib/kubelet/pods/7095aa7a-d067-4977-bdc5-3a45a52a6a39/volumes"
Sep 30 12:30:39 crc kubenswrapper[5002]: I0930 12:30:39.036088 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4lk28" event={"ID":"d64c51bd-c4d1-44a3-8e4c-d5df0a5511d6","Type":"ContainerStarted","Data":"9dd812fd8eff38d3afc8bcfa79132e210331b06e5c32ead0abd0867de60eeb06"}
Sep 30 12:30:39 crc kubenswrapper[5002]: I0930 12:30:39.037765 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4lk28" event={"ID":"d64c51bd-c4d1-44a3-8e4c-d5df0a5511d6","Type":"ContainerStarted","Data":"d1cde0a0ceea245e64bbc564d247c744e556535286eb8c795cc5be35b1c598dc"}
Sep 30 12:30:39 crc kubenswrapper[5002]: I0930 12:30:39.037960 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4lk28" event={"ID":"d64c51bd-c4d1-44a3-8e4c-d5df0a5511d6","Type":"ContainerStarted","Data":"a207c957dd90672bb37b3f1517fc6d62caf8d4ba294e755f459b2e83b948ba9a"}
Sep 30 12:30:39 crc kubenswrapper[5002]: I0930 12:30:39.038156 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4lk28" event={"ID":"d64c51bd-c4d1-44a3-8e4c-d5df0a5511d6","Type":"ContainerStarted","Data":"d9451c2a4939252fc46b61ebc49d01439d1b0e36ec72c2ba05a5d88bdf78b0bf"}
Sep 30 12:30:39 crc kubenswrapper[5002]: I0930 12:30:39.038300 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4lk28" event={"ID":"d64c51bd-c4d1-44a3-8e4c-d5df0a5511d6","Type":"ContainerStarted","Data":"5f42af7333e797fa2d5605a4fa473dae094a7f75cb9154e758200da64577fa19"}
Sep 30 12:30:39 crc kubenswrapper[5002]: I0930 12:30:39.038422 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4lk28" event={"ID":"d64c51bd-c4d1-44a3-8e4c-d5df0a5511d6","Type":"ContainerStarted","Data":"39d86921883f57947c7dc35a8c83a7773bf2961757e914e8e1de1ab34322f94a"}
Sep 30 12:30:41 crc kubenswrapper[5002]: I0930 12:30:41.401894 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="cert-manager/cert-manager-webhook-5655c58dd6-h79vx"
Sep 30 12:30:42 crc kubenswrapper[5002]: I0930 12:30:42.066784 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4lk28" event={"ID":"d64c51bd-c4d1-44a3-8e4c-d5df0a5511d6","Type":"ContainerStarted","Data":"7f31011d390e3986668963b27b857da270f0c178e12796afd806628787deca55"}
Sep 30 12:30:44 crc kubenswrapper[5002]: I0930 12:30:44.082422 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4lk28" event={"ID":"d64c51bd-c4d1-44a3-8e4c-d5df0a5511d6","Type":"ContainerStarted","Data":"e552104a9fe5b54f75f4b9abc215f6fec613f1f5819fe9e7b21c582d0f41b3d6"}
Sep 30 12:30:44 crc kubenswrapper[5002]: I0930 12:30:44.082942 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-4lk28"
Sep 30 12:30:44 crc kubenswrapper[5002]: I0930 12:30:44.082956 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-4lk28"
Sep 30 12:30:44 crc kubenswrapper[5002]: I0930 12:30:44.082964 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-4lk28"
Sep 30 12:30:44 crc kubenswrapper[5002]: I0930 12:30:44.114852 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-4lk28"
Sep 30 12:30:44 crc kubenswrapper[5002]: I0930 12:30:44.134297 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-4lk28" podStartSLOduration=8.134267491 podStartE2EDuration="8.134267491s" podCreationTimestamp="2025-09-30 12:30:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 12:30:44.116863564 +0000 UTC m=+618.366545720" watchObservedRunningTime="2025-09-30 12:30:44.134267491 +0000 UTC m=+618.383949667"
Sep 30 12:30:44 crc kubenswrapper[5002]: I0930 12:30:44.136596 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-4lk28"
Sep 30 12:30:47 crc kubenswrapper[5002]: I0930 12:30:47.676339 5002 scope.go:117] "RemoveContainer" containerID="c5f7e86f7e2139fe8fdc7a6d09f418a223f9da04a6950e5dec018265268c7de5"
Sep 30 12:30:47 crc kubenswrapper[5002]: E0930 12:30:47.678735 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 20s restarting failed container=kube-multus pod=multus-ttfn8_openshift-multus(2bd9f18d-bfb3-4bd7-9a87-242029cd3200)\"" pod="openshift-multus/multus-ttfn8" podUID="2bd9f18d-bfb3-4bd7-9a87-242029cd3200"
Sep 30 12:31:00 crc kubenswrapper[5002]: I0930 12:31:00.676359 5002 scope.go:117] "RemoveContainer" containerID="c5f7e86f7e2139fe8fdc7a6d09f418a223f9da04a6950e5dec018265268c7de5"
Sep 30 12:31:01 crc kubenswrapper[5002]: I0930 12:31:01.186230 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-ttfn8_2bd9f18d-bfb3-4bd7-9a87-242029cd3200/kube-multus/2.log"
Sep 30 12:31:01 crc kubenswrapper[5002]: I0930 12:31:01.186543 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-ttfn8" event={"ID":"2bd9f18d-bfb3-4bd7-9a87-242029cd3200","Type":"ContainerStarted","Data":"cf997f5cd46ac1ad4cf1f4df42e9e2b22e7b6816b8e645a0d59ff92b1bc425eb"}
Sep 30 12:31:07 crc kubenswrapper[5002]: I0930 12:31:07.338120 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-4lk28"
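[Editor's note: the "back-off 20s" in the multus CrashLoopBackOff entry above is consistent with kubelet's restart back-off, which, per upstream kubelet defaults to the best of my knowledge, starts at 10s and doubles per failed restart up to a 5m cap. A toy sketch of that schedule, assumptions flagged inline:]

```go
// Sketch of the CrashLoopBackOff delay schedule implied by "back-off 20s".
package main

import (
	"fmt"
	"time"
)

func main() {
	delay, max := 10*time.Second, 5*time.Minute // assumed kubelet defaults
	for i := 1; i <= 7; i++ {
		fmt.Printf("restart #%d: back-off %s\n", i, delay) // #2 prints 20s, matching the log
		if delay *= 2; delay > max {
			delay = max
		}
	}
}
```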
Sep 30 12:31:25 crc kubenswrapper[5002]: I0930 12:31:25.667740 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcstdhg"]
Sep 30 12:31:25 crc kubenswrapper[5002]: I0930 12:31:25.671626 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcstdhg"
Sep 30 12:31:25 crc kubenswrapper[5002]: I0930 12:31:25.678668 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc"
Sep 30 12:31:25 crc kubenswrapper[5002]: I0930 12:31:25.681632 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcstdhg"]
Sep 30 12:31:25 crc kubenswrapper[5002]: I0930 12:31:25.814659 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/8eafa11f-bdad-4035-82be-1fe0e27a0282-bundle\") pod \"9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcstdhg\" (UID: \"8eafa11f-bdad-4035-82be-1fe0e27a0282\") " pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcstdhg"
Sep 30 12:31:25 crc kubenswrapper[5002]: I0930 12:31:25.814739 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/8eafa11f-bdad-4035-82be-1fe0e27a0282-util\") pod \"9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcstdhg\" (UID: \"8eafa11f-bdad-4035-82be-1fe0e27a0282\") " pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcstdhg"
Sep 30 12:31:25 crc kubenswrapper[5002]: I0930 12:31:25.814776 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7d2m9\" (UniqueName: \"kubernetes.io/projected/8eafa11f-bdad-4035-82be-1fe0e27a0282-kube-api-access-7d2m9\") pod \"9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcstdhg\" (UID: \"8eafa11f-bdad-4035-82be-1fe0e27a0282\") " pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcstdhg"
Sep 30 12:31:25 crc kubenswrapper[5002]: I0930 12:31:25.916545 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/8eafa11f-bdad-4035-82be-1fe0e27a0282-bundle\") pod \"9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcstdhg\" (UID: \"8eafa11f-bdad-4035-82be-1fe0e27a0282\") " pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcstdhg"
Sep 30 12:31:25 crc kubenswrapper[5002]: I0930 12:31:25.916632 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/8eafa11f-bdad-4035-82be-1fe0e27a0282-util\") pod \"9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcstdhg\" (UID: \"8eafa11f-bdad-4035-82be-1fe0e27a0282\") " pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcstdhg"
Sep 30 12:31:25 crc kubenswrapper[5002]: I0930 12:31:25.916673 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7d2m9\" (UniqueName: \"kubernetes.io/projected/8eafa11f-bdad-4035-82be-1fe0e27a0282-kube-api-access-7d2m9\") pod \"9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcstdhg\" (UID: \"8eafa11f-bdad-4035-82be-1fe0e27a0282\") " pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcstdhg"
Sep 30 12:31:25 crc kubenswrapper[5002]: I0930 12:31:25.917578 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/8eafa11f-bdad-4035-82be-1fe0e27a0282-util\") pod \"9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcstdhg\" (UID: \"8eafa11f-bdad-4035-82be-1fe0e27a0282\") " pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcstdhg"
Sep 30 12:31:25 crc kubenswrapper[5002]: I0930 12:31:25.917604 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/8eafa11f-bdad-4035-82be-1fe0e27a0282-bundle\") pod \"9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcstdhg\" (UID: \"8eafa11f-bdad-4035-82be-1fe0e27a0282\") " pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcstdhg"
Sep 30 12:31:25 crc kubenswrapper[5002]: I0930 12:31:25.956897 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7d2m9\" (UniqueName: \"kubernetes.io/projected/8eafa11f-bdad-4035-82be-1fe0e27a0282-kube-api-access-7d2m9\") pod \"9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcstdhg\" (UID: \"8eafa11f-bdad-4035-82be-1fe0e27a0282\") " pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcstdhg"
Sep 30 12:31:25 crc kubenswrapper[5002]: I0930 12:31:25.992882 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcstdhg"
Sep 30 12:31:26 crc kubenswrapper[5002]: I0930 12:31:26.246158 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcstdhg"]
Sep 30 12:31:26 crc kubenswrapper[5002]: I0930 12:31:26.350785 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcstdhg" event={"ID":"8eafa11f-bdad-4035-82be-1fe0e27a0282","Type":"ContainerStarted","Data":"c03196b298940e64638dcbcf8d989552f8459419cc861942e8d6e1d2494dff5a"}
Sep 30 12:31:27 crc kubenswrapper[5002]: I0930 12:31:27.356118 5002 generic.go:334] "Generic (PLEG): container finished" podID="8eafa11f-bdad-4035-82be-1fe0e27a0282" containerID="58183aa9f676e1ef690d5b5d5bbb20d210152f41b5c546a81606adb410e69e20" exitCode=0
Sep 30 12:31:27 crc kubenswrapper[5002]: I0930 12:31:27.356319 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcstdhg" event={"ID":"8eafa11f-bdad-4035-82be-1fe0e27a0282","Type":"ContainerDied","Data":"58183aa9f676e1ef690d5b5d5bbb20d210152f41b5c546a81606adb410e69e20"}
Sep 30 12:31:30 crc kubenswrapper[5002]: I0930 12:31:30.388728 5002 generic.go:334] "Generic (PLEG): container finished" podID="8eafa11f-bdad-4035-82be-1fe0e27a0282" containerID="99b1b29ec5450b3ad759ee56623a90af912221ef07e9111f4dbe317d3ac5bc9d" exitCode=0
Sep 30 12:31:30 crc kubenswrapper[5002]: I0930 12:31:30.388825 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcstdhg" event={"ID":"8eafa11f-bdad-4035-82be-1fe0e27a0282","Type":"ContainerDied","Data":"99b1b29ec5450b3ad759ee56623a90af912221ef07e9111f4dbe317d3ac5bc9d"}
Sep 30 12:31:31 crc kubenswrapper[5002]: I0930 12:31:31.397003 5002 generic.go:334] "Generic (PLEG): container finished" podID="8eafa11f-bdad-4035-82be-1fe0e27a0282" containerID="46212a10a3edcc0c47a387cddc0613bd0c316c2c5aa096d07e0b25319c1d8853" exitCode=0
12:31:31.397376 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcstdhg" event={"ID":"8eafa11f-bdad-4035-82be-1fe0e27a0282","Type":"ContainerDied","Data":"46212a10a3edcc0c47a387cddc0613bd0c316c2c5aa096d07e0b25319c1d8853"} Sep 30 12:31:32 crc kubenswrapper[5002]: I0930 12:31:32.713892 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcstdhg" Sep 30 12:31:32 crc kubenswrapper[5002]: I0930 12:31:32.814526 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7d2m9\" (UniqueName: \"kubernetes.io/projected/8eafa11f-bdad-4035-82be-1fe0e27a0282-kube-api-access-7d2m9\") pod \"8eafa11f-bdad-4035-82be-1fe0e27a0282\" (UID: \"8eafa11f-bdad-4035-82be-1fe0e27a0282\") " Sep 30 12:31:32 crc kubenswrapper[5002]: I0930 12:31:32.814655 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/8eafa11f-bdad-4035-82be-1fe0e27a0282-util\") pod \"8eafa11f-bdad-4035-82be-1fe0e27a0282\" (UID: \"8eafa11f-bdad-4035-82be-1fe0e27a0282\") " Sep 30 12:31:32 crc kubenswrapper[5002]: I0930 12:31:32.814703 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/8eafa11f-bdad-4035-82be-1fe0e27a0282-bundle\") pod \"8eafa11f-bdad-4035-82be-1fe0e27a0282\" (UID: \"8eafa11f-bdad-4035-82be-1fe0e27a0282\") " Sep 30 12:31:32 crc kubenswrapper[5002]: I0930 12:31:32.815872 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8eafa11f-bdad-4035-82be-1fe0e27a0282-bundle" (OuterVolumeSpecName: "bundle") pod "8eafa11f-bdad-4035-82be-1fe0e27a0282" (UID: "8eafa11f-bdad-4035-82be-1fe0e27a0282"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 12:31:32 crc kubenswrapper[5002]: I0930 12:31:32.823846 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8eafa11f-bdad-4035-82be-1fe0e27a0282-kube-api-access-7d2m9" (OuterVolumeSpecName: "kube-api-access-7d2m9") pod "8eafa11f-bdad-4035-82be-1fe0e27a0282" (UID: "8eafa11f-bdad-4035-82be-1fe0e27a0282"). InnerVolumeSpecName "kube-api-access-7d2m9". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 12:31:32 crc kubenswrapper[5002]: I0930 12:31:32.825665 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8eafa11f-bdad-4035-82be-1fe0e27a0282-util" (OuterVolumeSpecName: "util") pod "8eafa11f-bdad-4035-82be-1fe0e27a0282" (UID: "8eafa11f-bdad-4035-82be-1fe0e27a0282"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 12:31:32 crc kubenswrapper[5002]: I0930 12:31:32.915642 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7d2m9\" (UniqueName: \"kubernetes.io/projected/8eafa11f-bdad-4035-82be-1fe0e27a0282-kube-api-access-7d2m9\") on node \"crc\" DevicePath \"\"" Sep 30 12:31:32 crc kubenswrapper[5002]: I0930 12:31:32.915673 5002 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/8eafa11f-bdad-4035-82be-1fe0e27a0282-util\") on node \"crc\" DevicePath \"\"" Sep 30 12:31:32 crc kubenswrapper[5002]: I0930 12:31:32.915684 5002 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/8eafa11f-bdad-4035-82be-1fe0e27a0282-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 12:31:33 crc kubenswrapper[5002]: I0930 12:31:33.416289 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcstdhg" event={"ID":"8eafa11f-bdad-4035-82be-1fe0e27a0282","Type":"ContainerDied","Data":"c03196b298940e64638dcbcf8d989552f8459419cc861942e8d6e1d2494dff5a"} Sep 30 12:31:33 crc kubenswrapper[5002]: I0930 12:31:33.416332 5002 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c03196b298940e64638dcbcf8d989552f8459419cc861942e8d6e1d2494dff5a" Sep 30 12:31:33 crc kubenswrapper[5002]: I0930 12:31:33.416434 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcstdhg" Sep 30 12:31:34 crc kubenswrapper[5002]: I0930 12:31:34.665871 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-operator-5d6f6cfd66-45rf2"] Sep 30 12:31:34 crc kubenswrapper[5002]: E0930 12:31:34.666107 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8eafa11f-bdad-4035-82be-1fe0e27a0282" containerName="util" Sep 30 12:31:34 crc kubenswrapper[5002]: I0930 12:31:34.666121 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="8eafa11f-bdad-4035-82be-1fe0e27a0282" containerName="util" Sep 30 12:31:34 crc kubenswrapper[5002]: E0930 12:31:34.666141 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8eafa11f-bdad-4035-82be-1fe0e27a0282" containerName="extract" Sep 30 12:31:34 crc kubenswrapper[5002]: I0930 12:31:34.666149 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="8eafa11f-bdad-4035-82be-1fe0e27a0282" containerName="extract" Sep 30 12:31:34 crc kubenswrapper[5002]: E0930 12:31:34.666166 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8eafa11f-bdad-4035-82be-1fe0e27a0282" containerName="pull" Sep 30 12:31:34 crc kubenswrapper[5002]: I0930 12:31:34.666175 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="8eafa11f-bdad-4035-82be-1fe0e27a0282" containerName="pull" Sep 30 12:31:34 crc kubenswrapper[5002]: I0930 12:31:34.666312 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="8eafa11f-bdad-4035-82be-1fe0e27a0282" containerName="extract" Sep 30 12:31:34 crc kubenswrapper[5002]: I0930 12:31:34.666829 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-operator-5d6f6cfd66-45rf2" Sep 30 12:31:34 crc kubenswrapper[5002]: I0930 12:31:34.669067 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"kube-root-ca.crt" Sep 30 12:31:34 crc kubenswrapper[5002]: I0930 12:31:34.669124 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"openshift-service-ca.crt" Sep 30 12:31:34 crc kubenswrapper[5002]: I0930 12:31:34.680901 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-operator-dockercfg-9lmn9" Sep 30 12:31:34 crc kubenswrapper[5002]: I0930 12:31:34.687634 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-5d6f6cfd66-45rf2"] Sep 30 12:31:34 crc kubenswrapper[5002]: I0930 12:31:34.740398 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kqh2q\" (UniqueName: \"kubernetes.io/projected/f5de2a34-58db-4513-a544-baac53a6ee7f-kube-api-access-kqh2q\") pod \"nmstate-operator-5d6f6cfd66-45rf2\" (UID: \"f5de2a34-58db-4513-a544-baac53a6ee7f\") " pod="openshift-nmstate/nmstate-operator-5d6f6cfd66-45rf2" Sep 30 12:31:34 crc kubenswrapper[5002]: I0930 12:31:34.841589 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kqh2q\" (UniqueName: \"kubernetes.io/projected/f5de2a34-58db-4513-a544-baac53a6ee7f-kube-api-access-kqh2q\") pod \"nmstate-operator-5d6f6cfd66-45rf2\" (UID: \"f5de2a34-58db-4513-a544-baac53a6ee7f\") " pod="openshift-nmstate/nmstate-operator-5d6f6cfd66-45rf2" Sep 30 12:31:34 crc kubenswrapper[5002]: I0930 12:31:34.866674 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kqh2q\" (UniqueName: \"kubernetes.io/projected/f5de2a34-58db-4513-a544-baac53a6ee7f-kube-api-access-kqh2q\") pod \"nmstate-operator-5d6f6cfd66-45rf2\" (UID: \"f5de2a34-58db-4513-a544-baac53a6ee7f\") " pod="openshift-nmstate/nmstate-operator-5d6f6cfd66-45rf2" Sep 30 12:31:34 crc kubenswrapper[5002]: I0930 12:31:34.983965 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-operator-5d6f6cfd66-45rf2" Sep 30 12:31:35 crc kubenswrapper[5002]: I0930 12:31:35.182206 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-5d6f6cfd66-45rf2"] Sep 30 12:31:35 crc kubenswrapper[5002]: I0930 12:31:35.425878 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-5d6f6cfd66-45rf2" event={"ID":"f5de2a34-58db-4513-a544-baac53a6ee7f","Type":"ContainerStarted","Data":"ba6b73c7717c4a8db27d526633fc16ece12064e1a3a651b441619abae725393e"} Sep 30 12:31:38 crc kubenswrapper[5002]: I0930 12:31:38.445530 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-5d6f6cfd66-45rf2" event={"ID":"f5de2a34-58db-4513-a544-baac53a6ee7f","Type":"ContainerStarted","Data":"01f756c6bb4f144649776271afd0c75507ee32373eccbe16d58fb9a5df1587cb"} Sep 30 12:31:38 crc kubenswrapper[5002]: I0930 12:31:38.464524 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-operator-5d6f6cfd66-45rf2" podStartSLOduration=2.140598175 podStartE2EDuration="4.464505291s" podCreationTimestamp="2025-09-30 12:31:34 +0000 UTC" firstStartedPulling="2025-09-30 12:31:35.197683365 +0000 UTC m=+669.447365511" lastFinishedPulling="2025-09-30 12:31:37.521590481 +0000 UTC m=+671.771272627" observedRunningTime="2025-09-30 12:31:38.45733165 +0000 UTC m=+672.707013816" watchObservedRunningTime="2025-09-30 12:31:38.464505291 +0000 UTC m=+672.714187427" Sep 30 12:31:39 crc kubenswrapper[5002]: I0930 12:31:39.383110 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-metrics-58fcddf996-w7bgk"] Sep 30 12:31:39 crc kubenswrapper[5002]: I0930 12:31:39.384416 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-metrics-58fcddf996-w7bgk" Sep 30 12:31:39 crc kubenswrapper[5002]: I0930 12:31:39.388803 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-handler-dockercfg-5s89s" Sep 30 12:31:39 crc kubenswrapper[5002]: I0930 12:31:39.399685 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-handler-4s6bl"] Sep 30 12:31:39 crc kubenswrapper[5002]: I0930 12:31:39.400778 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-handler-4s6bl" Sep 30 12:31:39 crc kubenswrapper[5002]: I0930 12:31:39.404154 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-58fcddf996-w7bgk"] Sep 30 12:31:39 crc kubenswrapper[5002]: I0930 12:31:39.420163 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-webhook-6d689559c5-5q5mt"] Sep 30 12:31:39 crc kubenswrapper[5002]: I0930 12:31:39.421066 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-webhook-6d689559c5-5q5mt" Sep 30 12:31:39 crc kubenswrapper[5002]: I0930 12:31:39.425844 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"openshift-nmstate-webhook" Sep 30 12:31:39 crc kubenswrapper[5002]: I0930 12:31:39.434545 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-6d689559c5-5q5mt"] Sep 30 12:31:39 crc kubenswrapper[5002]: I0930 12:31:39.507384 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/7a89ecb4-494a-40f2-8e0c-871b2c94f8a2-tls-key-pair\") pod \"nmstate-webhook-6d689559c5-5q5mt\" (UID: \"7a89ecb4-494a-40f2-8e0c-871b2c94f8a2\") " pod="openshift-nmstate/nmstate-webhook-6d689559c5-5q5mt" Sep 30 12:31:39 crc kubenswrapper[5002]: I0930 12:31:39.507465 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x9m77\" (UniqueName: \"kubernetes.io/projected/54cfc79e-c203-4715-a7d4-0120f7577db6-kube-api-access-x9m77\") pod \"nmstate-metrics-58fcddf996-w7bgk\" (UID: \"54cfc79e-c203-4715-a7d4-0120f7577db6\") " pod="openshift-nmstate/nmstate-metrics-58fcddf996-w7bgk" Sep 30 12:31:39 crc kubenswrapper[5002]: I0930 12:31:39.507524 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f8dk8\" (UniqueName: \"kubernetes.io/projected/7a89ecb4-494a-40f2-8e0c-871b2c94f8a2-kube-api-access-f8dk8\") pod \"nmstate-webhook-6d689559c5-5q5mt\" (UID: \"7a89ecb4-494a-40f2-8e0c-871b2c94f8a2\") " pod="openshift-nmstate/nmstate-webhook-6d689559c5-5q5mt" Sep 30 12:31:39 crc kubenswrapper[5002]: I0930 12:31:39.507549 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/b3b353e1-b31e-45ba-b22e-6e78fd291203-nmstate-lock\") pod \"nmstate-handler-4s6bl\" (UID: \"b3b353e1-b31e-45ba-b22e-6e78fd291203\") " pod="openshift-nmstate/nmstate-handler-4s6bl" Sep 30 12:31:39 crc kubenswrapper[5002]: I0930 12:31:39.507620 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/b3b353e1-b31e-45ba-b22e-6e78fd291203-dbus-socket\") pod \"nmstate-handler-4s6bl\" (UID: \"b3b353e1-b31e-45ba-b22e-6e78fd291203\") " pod="openshift-nmstate/nmstate-handler-4s6bl" Sep 30 12:31:39 crc kubenswrapper[5002]: I0930 12:31:39.507664 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/b3b353e1-b31e-45ba-b22e-6e78fd291203-ovs-socket\") pod \"nmstate-handler-4s6bl\" (UID: \"b3b353e1-b31e-45ba-b22e-6e78fd291203\") " pod="openshift-nmstate/nmstate-handler-4s6bl" Sep 30 12:31:39 crc kubenswrapper[5002]: I0930 12:31:39.507696 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wqjhl\" (UniqueName: \"kubernetes.io/projected/b3b353e1-b31e-45ba-b22e-6e78fd291203-kube-api-access-wqjhl\") pod \"nmstate-handler-4s6bl\" (UID: \"b3b353e1-b31e-45ba-b22e-6e78fd291203\") " pod="openshift-nmstate/nmstate-handler-4s6bl" Sep 30 12:31:39 crc kubenswrapper[5002]: I0930 12:31:39.510743 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-console-plugin-864bb6dfb5-p8l92"] Sep 30 12:31:39 crc kubenswrapper[5002]: 
I0930 12:31:39.511375 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-p8l92" Sep 30 12:31:39 crc kubenswrapper[5002]: I0930 12:31:39.513177 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"default-dockercfg-fdfrc" Sep 30 12:31:39 crc kubenswrapper[5002]: I0930 12:31:39.513373 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"nginx-conf" Sep 30 12:31:39 crc kubenswrapper[5002]: I0930 12:31:39.513530 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"plugin-serving-cert" Sep 30 12:31:39 crc kubenswrapper[5002]: I0930 12:31:39.562897 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-864bb6dfb5-p8l92"] Sep 30 12:31:39 crc kubenswrapper[5002]: I0930 12:31:39.608818 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/b3b353e1-b31e-45ba-b22e-6e78fd291203-nmstate-lock\") pod \"nmstate-handler-4s6bl\" (UID: \"b3b353e1-b31e-45ba-b22e-6e78fd291203\") " pod="openshift-nmstate/nmstate-handler-4s6bl" Sep 30 12:31:39 crc kubenswrapper[5002]: I0930 12:31:39.608865 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/b3b353e1-b31e-45ba-b22e-6e78fd291203-dbus-socket\") pod \"nmstate-handler-4s6bl\" (UID: \"b3b353e1-b31e-45ba-b22e-6e78fd291203\") " pod="openshift-nmstate/nmstate-handler-4s6bl" Sep 30 12:31:39 crc kubenswrapper[5002]: I0930 12:31:39.608884 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/b3b353e1-b31e-45ba-b22e-6e78fd291203-ovs-socket\") pod \"nmstate-handler-4s6bl\" (UID: \"b3b353e1-b31e-45ba-b22e-6e78fd291203\") " pod="openshift-nmstate/nmstate-handler-4s6bl" Sep 30 12:31:39 crc kubenswrapper[5002]: I0930 12:31:39.608908 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/c350ea0c-aa9c-4ccb-9607-aeff49e295b1-nginx-conf\") pod \"nmstate-console-plugin-864bb6dfb5-p8l92\" (UID: \"c350ea0c-aa9c-4ccb-9607-aeff49e295b1\") " pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-p8l92" Sep 30 12:31:39 crc kubenswrapper[5002]: I0930 12:31:39.608928 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wqjhl\" (UniqueName: \"kubernetes.io/projected/b3b353e1-b31e-45ba-b22e-6e78fd291203-kube-api-access-wqjhl\") pod \"nmstate-handler-4s6bl\" (UID: \"b3b353e1-b31e-45ba-b22e-6e78fd291203\") " pod="openshift-nmstate/nmstate-handler-4s6bl" Sep 30 12:31:39 crc kubenswrapper[5002]: I0930 12:31:39.608950 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/7a89ecb4-494a-40f2-8e0c-871b2c94f8a2-tls-key-pair\") pod \"nmstate-webhook-6d689559c5-5q5mt\" (UID: \"7a89ecb4-494a-40f2-8e0c-871b2c94f8a2\") " pod="openshift-nmstate/nmstate-webhook-6d689559c5-5q5mt" Sep 30 12:31:39 crc kubenswrapper[5002]: I0930 12:31:39.608977 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/c350ea0c-aa9c-4ccb-9607-aeff49e295b1-plugin-serving-cert\") pod \"nmstate-console-plugin-864bb6dfb5-p8l92\" (UID: 
\"c350ea0c-aa9c-4ccb-9607-aeff49e295b1\") " pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-p8l92" Sep 30 12:31:39 crc kubenswrapper[5002]: I0930 12:31:39.608980 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/b3b353e1-b31e-45ba-b22e-6e78fd291203-nmstate-lock\") pod \"nmstate-handler-4s6bl\" (UID: \"b3b353e1-b31e-45ba-b22e-6e78fd291203\") " pod="openshift-nmstate/nmstate-handler-4s6bl" Sep 30 12:31:39 crc kubenswrapper[5002]: I0930 12:31:39.608996 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fgq4f\" (UniqueName: \"kubernetes.io/projected/c350ea0c-aa9c-4ccb-9607-aeff49e295b1-kube-api-access-fgq4f\") pod \"nmstate-console-plugin-864bb6dfb5-p8l92\" (UID: \"c350ea0c-aa9c-4ccb-9607-aeff49e295b1\") " pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-p8l92" Sep 30 12:31:39 crc kubenswrapper[5002]: I0930 12:31:39.609095 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/b3b353e1-b31e-45ba-b22e-6e78fd291203-ovs-socket\") pod \"nmstate-handler-4s6bl\" (UID: \"b3b353e1-b31e-45ba-b22e-6e78fd291203\") " pod="openshift-nmstate/nmstate-handler-4s6bl" Sep 30 12:31:39 crc kubenswrapper[5002]: I0930 12:31:39.609110 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x9m77\" (UniqueName: \"kubernetes.io/projected/54cfc79e-c203-4715-a7d4-0120f7577db6-kube-api-access-x9m77\") pod \"nmstate-metrics-58fcddf996-w7bgk\" (UID: \"54cfc79e-c203-4715-a7d4-0120f7577db6\") " pod="openshift-nmstate/nmstate-metrics-58fcddf996-w7bgk" Sep 30 12:31:39 crc kubenswrapper[5002]: I0930 12:31:39.609187 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/b3b353e1-b31e-45ba-b22e-6e78fd291203-dbus-socket\") pod \"nmstate-handler-4s6bl\" (UID: \"b3b353e1-b31e-45ba-b22e-6e78fd291203\") " pod="openshift-nmstate/nmstate-handler-4s6bl" Sep 30 12:31:39 crc kubenswrapper[5002]: I0930 12:31:39.609240 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f8dk8\" (UniqueName: \"kubernetes.io/projected/7a89ecb4-494a-40f2-8e0c-871b2c94f8a2-kube-api-access-f8dk8\") pod \"nmstate-webhook-6d689559c5-5q5mt\" (UID: \"7a89ecb4-494a-40f2-8e0c-871b2c94f8a2\") " pod="openshift-nmstate/nmstate-webhook-6d689559c5-5q5mt" Sep 30 12:31:39 crc kubenswrapper[5002]: I0930 12:31:39.615164 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/7a89ecb4-494a-40f2-8e0c-871b2c94f8a2-tls-key-pair\") pod \"nmstate-webhook-6d689559c5-5q5mt\" (UID: \"7a89ecb4-494a-40f2-8e0c-871b2c94f8a2\") " pod="openshift-nmstate/nmstate-webhook-6d689559c5-5q5mt" Sep 30 12:31:39 crc kubenswrapper[5002]: I0930 12:31:39.628195 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f8dk8\" (UniqueName: \"kubernetes.io/projected/7a89ecb4-494a-40f2-8e0c-871b2c94f8a2-kube-api-access-f8dk8\") pod \"nmstate-webhook-6d689559c5-5q5mt\" (UID: \"7a89ecb4-494a-40f2-8e0c-871b2c94f8a2\") " pod="openshift-nmstate/nmstate-webhook-6d689559c5-5q5mt" Sep 30 12:31:39 crc kubenswrapper[5002]: I0930 12:31:39.634099 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x9m77\" (UniqueName: 
\"kubernetes.io/projected/54cfc79e-c203-4715-a7d4-0120f7577db6-kube-api-access-x9m77\") pod \"nmstate-metrics-58fcddf996-w7bgk\" (UID: \"54cfc79e-c203-4715-a7d4-0120f7577db6\") " pod="openshift-nmstate/nmstate-metrics-58fcddf996-w7bgk" Sep 30 12:31:39 crc kubenswrapper[5002]: I0930 12:31:39.635423 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wqjhl\" (UniqueName: \"kubernetes.io/projected/b3b353e1-b31e-45ba-b22e-6e78fd291203-kube-api-access-wqjhl\") pod \"nmstate-handler-4s6bl\" (UID: \"b3b353e1-b31e-45ba-b22e-6e78fd291203\") " pod="openshift-nmstate/nmstate-handler-4s6bl" Sep 30 12:31:39 crc kubenswrapper[5002]: I0930 12:31:39.709899 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-564c9f6756-ntpp5"] Sep 30 12:31:39 crc kubenswrapper[5002]: I0930 12:31:39.710375 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/c350ea0c-aa9c-4ccb-9607-aeff49e295b1-nginx-conf\") pod \"nmstate-console-plugin-864bb6dfb5-p8l92\" (UID: \"c350ea0c-aa9c-4ccb-9607-aeff49e295b1\") " pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-p8l92" Sep 30 12:31:39 crc kubenswrapper[5002]: I0930 12:31:39.710432 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/c350ea0c-aa9c-4ccb-9607-aeff49e295b1-plugin-serving-cert\") pod \"nmstate-console-plugin-864bb6dfb5-p8l92\" (UID: \"c350ea0c-aa9c-4ccb-9607-aeff49e295b1\") " pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-p8l92" Sep 30 12:31:39 crc kubenswrapper[5002]: E0930 12:31:39.710572 5002 secret.go:188] Couldn't get secret openshift-nmstate/plugin-serving-cert: secret "plugin-serving-cert" not found Sep 30 12:31:39 crc kubenswrapper[5002]: E0930 12:31:39.710634 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c350ea0c-aa9c-4ccb-9607-aeff49e295b1-plugin-serving-cert podName:c350ea0c-aa9c-4ccb-9607-aeff49e295b1 nodeName:}" failed. No retries permitted until 2025-09-30 12:31:40.210616642 +0000 UTC m=+674.460298788 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "plugin-serving-cert" (UniqueName: "kubernetes.io/secret/c350ea0c-aa9c-4ccb-9607-aeff49e295b1-plugin-serving-cert") pod "nmstate-console-plugin-864bb6dfb5-p8l92" (UID: "c350ea0c-aa9c-4ccb-9607-aeff49e295b1") : secret "plugin-serving-cert" not found Sep 30 12:31:39 crc kubenswrapper[5002]: I0930 12:31:39.710900 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fgq4f\" (UniqueName: \"kubernetes.io/projected/c350ea0c-aa9c-4ccb-9607-aeff49e295b1-kube-api-access-fgq4f\") pod \"nmstate-console-plugin-864bb6dfb5-p8l92\" (UID: \"c350ea0c-aa9c-4ccb-9607-aeff49e295b1\") " pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-p8l92" Sep 30 12:31:39 crc kubenswrapper[5002]: I0930 12:31:39.711182 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-metrics-58fcddf996-w7bgk" Sep 30 12:31:39 crc kubenswrapper[5002]: I0930 12:31:39.711463 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/c350ea0c-aa9c-4ccb-9607-aeff49e295b1-nginx-conf\") pod \"nmstate-console-plugin-864bb6dfb5-p8l92\" (UID: \"c350ea0c-aa9c-4ccb-9607-aeff49e295b1\") " pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-p8l92" Sep 30 12:31:39 crc kubenswrapper[5002]: I0930 12:31:39.711710 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-564c9f6756-ntpp5" Sep 30 12:31:39 crc kubenswrapper[5002]: I0930 12:31:39.730782 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-564c9f6756-ntpp5"] Sep 30 12:31:39 crc kubenswrapper[5002]: I0930 12:31:39.738466 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-handler-4s6bl" Sep 30 12:31:39 crc kubenswrapper[5002]: I0930 12:31:39.742240 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fgq4f\" (UniqueName: \"kubernetes.io/projected/c350ea0c-aa9c-4ccb-9607-aeff49e295b1-kube-api-access-fgq4f\") pod \"nmstate-console-plugin-864bb6dfb5-p8l92\" (UID: \"c350ea0c-aa9c-4ccb-9607-aeff49e295b1\") " pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-p8l92" Sep 30 12:31:39 crc kubenswrapper[5002]: I0930 12:31:39.751210 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-webhook-6d689559c5-5q5mt" Sep 30 12:31:39 crc kubenswrapper[5002]: I0930 12:31:39.812086 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/f870ee28-51a3-49a5-9deb-02a6c66de926-oauth-serving-cert\") pod \"console-564c9f6756-ntpp5\" (UID: \"f870ee28-51a3-49a5-9deb-02a6c66de926\") " pod="openshift-console/console-564c9f6756-ntpp5" Sep 30 12:31:39 crc kubenswrapper[5002]: I0930 12:31:39.812325 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/f870ee28-51a3-49a5-9deb-02a6c66de926-service-ca\") pod \"console-564c9f6756-ntpp5\" (UID: \"f870ee28-51a3-49a5-9deb-02a6c66de926\") " pod="openshift-console/console-564c9f6756-ntpp5" Sep 30 12:31:39 crc kubenswrapper[5002]: I0930 12:31:39.812356 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/f870ee28-51a3-49a5-9deb-02a6c66de926-console-config\") pod \"console-564c9f6756-ntpp5\" (UID: \"f870ee28-51a3-49a5-9deb-02a6c66de926\") " pod="openshift-console/console-564c9f6756-ntpp5" Sep 30 12:31:39 crc kubenswrapper[5002]: I0930 12:31:39.812402 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/f870ee28-51a3-49a5-9deb-02a6c66de926-trusted-ca-bundle\") pod \"console-564c9f6756-ntpp5\" (UID: \"f870ee28-51a3-49a5-9deb-02a6c66de926\") " pod="openshift-console/console-564c9f6756-ntpp5" Sep 30 12:31:39 crc kubenswrapper[5002]: I0930 12:31:39.812423 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: 
\"kubernetes.io/secret/f870ee28-51a3-49a5-9deb-02a6c66de926-console-oauth-config\") pod \"console-564c9f6756-ntpp5\" (UID: \"f870ee28-51a3-49a5-9deb-02a6c66de926\") " pod="openshift-console/console-564c9f6756-ntpp5" Sep 30 12:31:39 crc kubenswrapper[5002]: I0930 12:31:39.812444 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/f870ee28-51a3-49a5-9deb-02a6c66de926-console-serving-cert\") pod \"console-564c9f6756-ntpp5\" (UID: \"f870ee28-51a3-49a5-9deb-02a6c66de926\") " pod="openshift-console/console-564c9f6756-ntpp5" Sep 30 12:31:39 crc kubenswrapper[5002]: I0930 12:31:39.812462 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jprjw\" (UniqueName: \"kubernetes.io/projected/f870ee28-51a3-49a5-9deb-02a6c66de926-kube-api-access-jprjw\") pod \"console-564c9f6756-ntpp5\" (UID: \"f870ee28-51a3-49a5-9deb-02a6c66de926\") " pod="openshift-console/console-564c9f6756-ntpp5" Sep 30 12:31:39 crc kubenswrapper[5002]: I0930 12:31:39.913298 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/f870ee28-51a3-49a5-9deb-02a6c66de926-service-ca\") pod \"console-564c9f6756-ntpp5\" (UID: \"f870ee28-51a3-49a5-9deb-02a6c66de926\") " pod="openshift-console/console-564c9f6756-ntpp5" Sep 30 12:31:39 crc kubenswrapper[5002]: I0930 12:31:39.913354 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/f870ee28-51a3-49a5-9deb-02a6c66de926-console-config\") pod \"console-564c9f6756-ntpp5\" (UID: \"f870ee28-51a3-49a5-9deb-02a6c66de926\") " pod="openshift-console/console-564c9f6756-ntpp5" Sep 30 12:31:39 crc kubenswrapper[5002]: I0930 12:31:39.913390 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/f870ee28-51a3-49a5-9deb-02a6c66de926-trusted-ca-bundle\") pod \"console-564c9f6756-ntpp5\" (UID: \"f870ee28-51a3-49a5-9deb-02a6c66de926\") " pod="openshift-console/console-564c9f6756-ntpp5" Sep 30 12:31:39 crc kubenswrapper[5002]: I0930 12:31:39.913414 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/f870ee28-51a3-49a5-9deb-02a6c66de926-console-oauth-config\") pod \"console-564c9f6756-ntpp5\" (UID: \"f870ee28-51a3-49a5-9deb-02a6c66de926\") " pod="openshift-console/console-564c9f6756-ntpp5" Sep 30 12:31:39 crc kubenswrapper[5002]: I0930 12:31:39.913435 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/f870ee28-51a3-49a5-9deb-02a6c66de926-console-serving-cert\") pod \"console-564c9f6756-ntpp5\" (UID: \"f870ee28-51a3-49a5-9deb-02a6c66de926\") " pod="openshift-console/console-564c9f6756-ntpp5" Sep 30 12:31:39 crc kubenswrapper[5002]: I0930 12:31:39.913455 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jprjw\" (UniqueName: \"kubernetes.io/projected/f870ee28-51a3-49a5-9deb-02a6c66de926-kube-api-access-jprjw\") pod \"console-564c9f6756-ntpp5\" (UID: \"f870ee28-51a3-49a5-9deb-02a6c66de926\") " pod="openshift-console/console-564c9f6756-ntpp5" Sep 30 12:31:39 crc kubenswrapper[5002]: I0930 12:31:39.913515 5002 reconciler_common.go:218] "operationExecutor.MountVolume started 
for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/f870ee28-51a3-49a5-9deb-02a6c66de926-oauth-serving-cert\") pod \"console-564c9f6756-ntpp5\" (UID: \"f870ee28-51a3-49a5-9deb-02a6c66de926\") " pod="openshift-console/console-564c9f6756-ntpp5" Sep 30 12:31:39 crc kubenswrapper[5002]: I0930 12:31:39.914268 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/f870ee28-51a3-49a5-9deb-02a6c66de926-oauth-serving-cert\") pod \"console-564c9f6756-ntpp5\" (UID: \"f870ee28-51a3-49a5-9deb-02a6c66de926\") " pod="openshift-console/console-564c9f6756-ntpp5" Sep 30 12:31:39 crc kubenswrapper[5002]: I0930 12:31:39.915143 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/f870ee28-51a3-49a5-9deb-02a6c66de926-trusted-ca-bundle\") pod \"console-564c9f6756-ntpp5\" (UID: \"f870ee28-51a3-49a5-9deb-02a6c66de926\") " pod="openshift-console/console-564c9f6756-ntpp5" Sep 30 12:31:39 crc kubenswrapper[5002]: I0930 12:31:39.915808 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/f870ee28-51a3-49a5-9deb-02a6c66de926-console-config\") pod \"console-564c9f6756-ntpp5\" (UID: \"f870ee28-51a3-49a5-9deb-02a6c66de926\") " pod="openshift-console/console-564c9f6756-ntpp5" Sep 30 12:31:39 crc kubenswrapper[5002]: I0930 12:31:39.915885 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/f870ee28-51a3-49a5-9deb-02a6c66de926-service-ca\") pod \"console-564c9f6756-ntpp5\" (UID: \"f870ee28-51a3-49a5-9deb-02a6c66de926\") " pod="openshift-console/console-564c9f6756-ntpp5" Sep 30 12:31:39 crc kubenswrapper[5002]: I0930 12:31:39.917785 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/f870ee28-51a3-49a5-9deb-02a6c66de926-console-serving-cert\") pod \"console-564c9f6756-ntpp5\" (UID: \"f870ee28-51a3-49a5-9deb-02a6c66de926\") " pod="openshift-console/console-564c9f6756-ntpp5" Sep 30 12:31:39 crc kubenswrapper[5002]: I0930 12:31:39.927629 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/f870ee28-51a3-49a5-9deb-02a6c66de926-console-oauth-config\") pod \"console-564c9f6756-ntpp5\" (UID: \"f870ee28-51a3-49a5-9deb-02a6c66de926\") " pod="openshift-console/console-564c9f6756-ntpp5" Sep 30 12:31:39 crc kubenswrapper[5002]: I0930 12:31:39.929488 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jprjw\" (UniqueName: \"kubernetes.io/projected/f870ee28-51a3-49a5-9deb-02a6c66de926-kube-api-access-jprjw\") pod \"console-564c9f6756-ntpp5\" (UID: \"f870ee28-51a3-49a5-9deb-02a6c66de926\") " pod="openshift-console/console-564c9f6756-ntpp5" Sep 30 12:31:39 crc kubenswrapper[5002]: I0930 12:31:39.990039 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-6d689559c5-5q5mt"] Sep 30 12:31:39 crc kubenswrapper[5002]: W0930 12:31:39.992169 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7a89ecb4_494a_40f2_8e0c_871b2c94f8a2.slice/crio-111521deef4f917799360c822acd63601aac3917dd189138f72c83c1fdc6bf22 WatchSource:0}: Error finding container 
111521deef4f917799360c822acd63601aac3917dd189138f72c83c1fdc6bf22: Status 404 returned error can't find the container with id 111521deef4f917799360c822acd63601aac3917dd189138f72c83c1fdc6bf22 Sep 30 12:31:40 crc kubenswrapper[5002]: I0930 12:31:40.070151 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-564c9f6756-ntpp5" Sep 30 12:31:40 crc kubenswrapper[5002]: I0930 12:31:40.128021 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-58fcddf996-w7bgk"] Sep 30 12:31:40 crc kubenswrapper[5002]: W0930 12:31:40.136291 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod54cfc79e_c203_4715_a7d4_0120f7577db6.slice/crio-7fb1c06e3799be2f73f40c78bdf5f80d994e1323f535fb1975f191110b91f024 WatchSource:0}: Error finding container 7fb1c06e3799be2f73f40c78bdf5f80d994e1323f535fb1975f191110b91f024: Status 404 returned error can't find the container with id 7fb1c06e3799be2f73f40c78bdf5f80d994e1323f535fb1975f191110b91f024 Sep 30 12:31:40 crc kubenswrapper[5002]: I0930 12:31:40.219543 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/c350ea0c-aa9c-4ccb-9607-aeff49e295b1-plugin-serving-cert\") pod \"nmstate-console-plugin-864bb6dfb5-p8l92\" (UID: \"c350ea0c-aa9c-4ccb-9607-aeff49e295b1\") " pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-p8l92" Sep 30 12:31:40 crc kubenswrapper[5002]: I0930 12:31:40.223902 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/c350ea0c-aa9c-4ccb-9607-aeff49e295b1-plugin-serving-cert\") pod \"nmstate-console-plugin-864bb6dfb5-p8l92\" (UID: \"c350ea0c-aa9c-4ccb-9607-aeff49e295b1\") " pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-p8l92" Sep 30 12:31:40 crc kubenswrapper[5002]: I0930 12:31:40.258096 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-564c9f6756-ntpp5"] Sep 30 12:31:40 crc kubenswrapper[5002]: W0930 12:31:40.261932 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf870ee28_51a3_49a5_9deb_02a6c66de926.slice/crio-05d3fc58734903dc1d97bd7314e69ae4586dc1ed1be3bde5c5b39f3020c55f21 WatchSource:0}: Error finding container 05d3fc58734903dc1d97bd7314e69ae4586dc1ed1be3bde5c5b39f3020c55f21: Status 404 returned error can't find the container with id 05d3fc58734903dc1d97bd7314e69ae4586dc1ed1be3bde5c5b39f3020c55f21 Sep 30 12:31:40 crc kubenswrapper[5002]: I0930 12:31:40.462743 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-4s6bl" event={"ID":"b3b353e1-b31e-45ba-b22e-6e78fd291203","Type":"ContainerStarted","Data":"1afc159f8b4799ff6e709cf0e9a8e0055149268a24f48fa8071acbde8c1acc50"} Sep 30 12:31:40 crc kubenswrapper[5002]: I0930 12:31:40.465535 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-p8l92" Sep 30 12:31:40 crc kubenswrapper[5002]: I0930 12:31:40.466448 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-58fcddf996-w7bgk" event={"ID":"54cfc79e-c203-4715-a7d4-0120f7577db6","Type":"ContainerStarted","Data":"7fb1c06e3799be2f73f40c78bdf5f80d994e1323f535fb1975f191110b91f024"} Sep 30 12:31:40 crc kubenswrapper[5002]: I0930 12:31:40.467564 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-6d689559c5-5q5mt" event={"ID":"7a89ecb4-494a-40f2-8e0c-871b2c94f8a2","Type":"ContainerStarted","Data":"111521deef4f917799360c822acd63601aac3917dd189138f72c83c1fdc6bf22"} Sep 30 12:31:40 crc kubenswrapper[5002]: I0930 12:31:40.469270 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-564c9f6756-ntpp5" event={"ID":"f870ee28-51a3-49a5-9deb-02a6c66de926","Type":"ContainerStarted","Data":"8d3fd16cc37e352fb43a7b20eb15462e77d1beff2103ae2d7d157e11b3c183a8"} Sep 30 12:31:40 crc kubenswrapper[5002]: I0930 12:31:40.469294 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-564c9f6756-ntpp5" event={"ID":"f870ee28-51a3-49a5-9deb-02a6c66de926","Type":"ContainerStarted","Data":"05d3fc58734903dc1d97bd7314e69ae4586dc1ed1be3bde5c5b39f3020c55f21"} Sep 30 12:31:40 crc kubenswrapper[5002]: I0930 12:31:40.715493 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-564c9f6756-ntpp5" podStartSLOduration=1.7154505439999999 podStartE2EDuration="1.715450544s" podCreationTimestamp="2025-09-30 12:31:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 12:31:40.498731581 +0000 UTC m=+674.748413817" watchObservedRunningTime="2025-09-30 12:31:40.715450544 +0000 UTC m=+674.965132700" Sep 30 12:31:40 crc kubenswrapper[5002]: I0930 12:31:40.715995 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-864bb6dfb5-p8l92"] Sep 30 12:31:41 crc kubenswrapper[5002]: I0930 12:31:41.482522 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-p8l92" event={"ID":"c350ea0c-aa9c-4ccb-9607-aeff49e295b1","Type":"ContainerStarted","Data":"f687054ca8e38da32b41bd238e5cc7aac2f29df3b99164fd17ff6ea3ce6f3a7d"} Sep 30 12:31:43 crc kubenswrapper[5002]: I0930 12:31:43.500771 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-4s6bl" event={"ID":"b3b353e1-b31e-45ba-b22e-6e78fd291203","Type":"ContainerStarted","Data":"3d9c78df6a0d206644061cbf537babbd3e90a85b4556b89c180d5e27ec337624"} Sep 30 12:31:43 crc kubenswrapper[5002]: I0930 12:31:43.501175 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-handler-4s6bl" Sep 30 12:31:43 crc kubenswrapper[5002]: I0930 12:31:43.502558 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-58fcddf996-w7bgk" event={"ID":"54cfc79e-c203-4715-a7d4-0120f7577db6","Type":"ContainerStarted","Data":"71b3695123d2e4f148e460836e08ab9d892f92cb608da4b0bcfe2fdf53184f7a"} Sep 30 12:31:43 crc kubenswrapper[5002]: I0930 12:31:43.504304 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-6d689559c5-5q5mt" 
event={"ID":"7a89ecb4-494a-40f2-8e0c-871b2c94f8a2","Type":"ContainerStarted","Data":"e1ac92bde30a14a1de6912f8642015fa165288401fd65da0a81d20d8d0213617"} Sep 30 12:31:43 crc kubenswrapper[5002]: I0930 12:31:43.504511 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-webhook-6d689559c5-5q5mt" Sep 30 12:31:43 crc kubenswrapper[5002]: I0930 12:31:43.517254 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-handler-4s6bl" podStartSLOduration=1.437402846 podStartE2EDuration="4.517234448s" podCreationTimestamp="2025-09-30 12:31:39 +0000 UTC" firstStartedPulling="2025-09-30 12:31:39.778921874 +0000 UTC m=+674.028604010" lastFinishedPulling="2025-09-30 12:31:42.858753426 +0000 UTC m=+677.108435612" observedRunningTime="2025-09-30 12:31:43.51550316 +0000 UTC m=+677.765185326" watchObservedRunningTime="2025-09-30 12:31:43.517234448 +0000 UTC m=+677.766916594" Sep 30 12:31:44 crc kubenswrapper[5002]: I0930 12:31:44.512174 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-p8l92" event={"ID":"c350ea0c-aa9c-4ccb-9607-aeff49e295b1","Type":"ContainerStarted","Data":"8a97dc59c75c94868a83c4f5459dae763d09ce544a171eef99ea9bc7776579ff"} Sep 30 12:31:44 crc kubenswrapper[5002]: I0930 12:31:44.526083 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-webhook-6d689559c5-5q5mt" podStartSLOduration=2.6722870629999997 podStartE2EDuration="5.526064481s" podCreationTimestamp="2025-09-30 12:31:39 +0000 UTC" firstStartedPulling="2025-09-30 12:31:39.994435923 +0000 UTC m=+674.244118079" lastFinishedPulling="2025-09-30 12:31:42.848213311 +0000 UTC m=+677.097895497" observedRunningTime="2025-09-30 12:31:43.530062527 +0000 UTC m=+677.779744693" watchObservedRunningTime="2025-09-30 12:31:44.526064481 +0000 UTC m=+678.775746647" Sep 30 12:31:44 crc kubenswrapper[5002]: I0930 12:31:44.529447 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-p8l92" podStartSLOduration=2.487532863 podStartE2EDuration="5.529433275s" podCreationTimestamp="2025-09-30 12:31:39 +0000 UTC" firstStartedPulling="2025-09-30 12:31:40.722257974 +0000 UTC m=+674.971940110" lastFinishedPulling="2025-09-30 12:31:43.764158376 +0000 UTC m=+678.013840522" observedRunningTime="2025-09-30 12:31:44.523255242 +0000 UTC m=+678.772937388" watchObservedRunningTime="2025-09-30 12:31:44.529433275 +0000 UTC m=+678.779115441" Sep 30 12:31:46 crc kubenswrapper[5002]: I0930 12:31:46.526076 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-58fcddf996-w7bgk" event={"ID":"54cfc79e-c203-4715-a7d4-0120f7577db6","Type":"ContainerStarted","Data":"39dcfa2f776c7dcbd086a67041d1484afff70ad25cbc3fc742e29a30edd8c91b"} Sep 30 12:31:46 crc kubenswrapper[5002]: I0930 12:31:46.541331 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-metrics-58fcddf996-w7bgk" podStartSLOduration=2.159907919 podStartE2EDuration="7.5413048s" podCreationTimestamp="2025-09-30 12:31:39 +0000 UTC" firstStartedPulling="2025-09-30 12:31:40.13944032 +0000 UTC m=+674.389122466" lastFinishedPulling="2025-09-30 12:31:45.520837161 +0000 UTC m=+679.770519347" observedRunningTime="2025-09-30 12:31:46.539004236 +0000 UTC m=+680.788686392" watchObservedRunningTime="2025-09-30 12:31:46.5413048 +0000 UTC m=+680.790986976" Sep 30 12:31:49 crc 
kubenswrapper[5002]: I0930 12:31:49.773135 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-handler-4s6bl" Sep 30 12:31:50 crc kubenswrapper[5002]: I0930 12:31:50.071317 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-564c9f6756-ntpp5" Sep 30 12:31:50 crc kubenswrapper[5002]: I0930 12:31:50.071370 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-564c9f6756-ntpp5" Sep 30 12:31:50 crc kubenswrapper[5002]: I0930 12:31:50.076236 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-564c9f6756-ntpp5" Sep 30 12:31:50 crc kubenswrapper[5002]: I0930 12:31:50.554831 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-564c9f6756-ntpp5" Sep 30 12:31:50 crc kubenswrapper[5002]: I0930 12:31:50.600900 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-79sft"] Sep 30 12:31:59 crc kubenswrapper[5002]: I0930 12:31:59.758644 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-webhook-6d689559c5-5q5mt" Sep 30 12:32:02 crc kubenswrapper[5002]: I0930 12:32:02.098331 5002 patch_prober.go:28] interesting pod/machine-config-daemon-ncbb5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 12:32:02 crc kubenswrapper[5002]: I0930 12:32:02.098404 5002 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" podUID="341a55c6-78d3-4fa2-8f47-b56fd41fa1c1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 12:32:13 crc kubenswrapper[5002]: I0930 12:32:13.189468 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96bqfq5"] Sep 30 12:32:13 crc kubenswrapper[5002]: I0930 12:32:13.192371 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96bqfq5" Sep 30 12:32:13 crc kubenswrapper[5002]: I0930 12:32:13.195080 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Sep 30 12:32:13 crc kubenswrapper[5002]: I0930 12:32:13.204618 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96bqfq5"] Sep 30 12:32:13 crc kubenswrapper[5002]: I0930 12:32:13.320156 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/9da270d3-58ec-44e6-acaa-3cb86fbc2047-util\") pod \"f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96bqfq5\" (UID: \"9da270d3-58ec-44e6-acaa-3cb86fbc2047\") " pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96bqfq5" Sep 30 12:32:13 crc kubenswrapper[5002]: I0930 12:32:13.320209 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mnpmw\" (UniqueName: \"kubernetes.io/projected/9da270d3-58ec-44e6-acaa-3cb86fbc2047-kube-api-access-mnpmw\") pod \"f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96bqfq5\" (UID: \"9da270d3-58ec-44e6-acaa-3cb86fbc2047\") " pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96bqfq5" Sep 30 12:32:13 crc kubenswrapper[5002]: I0930 12:32:13.320248 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/9da270d3-58ec-44e6-acaa-3cb86fbc2047-bundle\") pod \"f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96bqfq5\" (UID: \"9da270d3-58ec-44e6-acaa-3cb86fbc2047\") " pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96bqfq5" Sep 30 12:32:13 crc kubenswrapper[5002]: I0930 12:32:13.421138 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/9da270d3-58ec-44e6-acaa-3cb86fbc2047-bundle\") pod \"f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96bqfq5\" (UID: \"9da270d3-58ec-44e6-acaa-3cb86fbc2047\") " pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96bqfq5" Sep 30 12:32:13 crc kubenswrapper[5002]: I0930 12:32:13.421608 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/9da270d3-58ec-44e6-acaa-3cb86fbc2047-bundle\") pod \"f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96bqfq5\" (UID: \"9da270d3-58ec-44e6-acaa-3cb86fbc2047\") " pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96bqfq5" Sep 30 12:32:13 crc kubenswrapper[5002]: I0930 12:32:13.421648 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/9da270d3-58ec-44e6-acaa-3cb86fbc2047-util\") pod \"f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96bqfq5\" (UID: \"9da270d3-58ec-44e6-acaa-3cb86fbc2047\") " pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96bqfq5" Sep 30 12:32:13 crc kubenswrapper[5002]: I0930 12:32:13.421712 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mnpmw\" (UniqueName: 
\"kubernetes.io/projected/9da270d3-58ec-44e6-acaa-3cb86fbc2047-kube-api-access-mnpmw\") pod \"f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96bqfq5\" (UID: \"9da270d3-58ec-44e6-acaa-3cb86fbc2047\") " pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96bqfq5" Sep 30 12:32:13 crc kubenswrapper[5002]: I0930 12:32:13.422688 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/9da270d3-58ec-44e6-acaa-3cb86fbc2047-util\") pod \"f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96bqfq5\" (UID: \"9da270d3-58ec-44e6-acaa-3cb86fbc2047\") " pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96bqfq5" Sep 30 12:32:13 crc kubenswrapper[5002]: I0930 12:32:13.457068 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mnpmw\" (UniqueName: \"kubernetes.io/projected/9da270d3-58ec-44e6-acaa-3cb86fbc2047-kube-api-access-mnpmw\") pod \"f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96bqfq5\" (UID: \"9da270d3-58ec-44e6-acaa-3cb86fbc2047\") " pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96bqfq5" Sep 30 12:32:13 crc kubenswrapper[5002]: I0930 12:32:13.519050 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96bqfq5" Sep 30 12:32:13 crc kubenswrapper[5002]: I0930 12:32:13.699848 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96bqfq5"] Sep 30 12:32:13 crc kubenswrapper[5002]: W0930 12:32:13.708014 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9da270d3_58ec_44e6_acaa_3cb86fbc2047.slice/crio-652ecb8eb9f3600bdb497a9ea0e0e8178261bc41f3846426b48b67631fca404d WatchSource:0}: Error finding container 652ecb8eb9f3600bdb497a9ea0e0e8178261bc41f3846426b48b67631fca404d: Status 404 returned error can't find the container with id 652ecb8eb9f3600bdb497a9ea0e0e8178261bc41f3846426b48b67631fca404d Sep 30 12:32:14 crc kubenswrapper[5002]: I0930 12:32:14.690181 5002 generic.go:334] "Generic (PLEG): container finished" podID="9da270d3-58ec-44e6-acaa-3cb86fbc2047" containerID="3d6a34c91a7393ab22f9e53a073bcc70ec4df805fce6855385d2786ab3dd09da" exitCode=0 Sep 30 12:32:14 crc kubenswrapper[5002]: I0930 12:32:14.690236 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96bqfq5" event={"ID":"9da270d3-58ec-44e6-acaa-3cb86fbc2047","Type":"ContainerDied","Data":"3d6a34c91a7393ab22f9e53a073bcc70ec4df805fce6855385d2786ab3dd09da"} Sep 30 12:32:14 crc kubenswrapper[5002]: I0930 12:32:14.690276 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96bqfq5" event={"ID":"9da270d3-58ec-44e6-acaa-3cb86fbc2047","Type":"ContainerStarted","Data":"652ecb8eb9f3600bdb497a9ea0e0e8178261bc41f3846426b48b67631fca404d"} Sep 30 12:32:15 crc kubenswrapper[5002]: I0930 12:32:15.646310 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-console/console-f9d7485db-79sft" podUID="7151e9d9-4417-40bb-aac5-8f838065aa79" containerName="console" containerID="cri-o://db9f35aac93001499f179a91ffed4dd41ca88574dcf65e4ed661bc61bc833a56" gracePeriod=15 Sep 30 12:32:16 
crc kubenswrapper[5002]: I0930 12:32:16.011024 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-79sft_7151e9d9-4417-40bb-aac5-8f838065aa79/console/0.log" Sep 30 12:32:16 crc kubenswrapper[5002]: I0930 12:32:16.011285 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-79sft" Sep 30 12:32:16 crc kubenswrapper[5002]: I0930 12:32:16.151727 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qc7nq\" (UniqueName: \"kubernetes.io/projected/7151e9d9-4417-40bb-aac5-8f838065aa79-kube-api-access-qc7nq\") pod \"7151e9d9-4417-40bb-aac5-8f838065aa79\" (UID: \"7151e9d9-4417-40bb-aac5-8f838065aa79\") " Sep 30 12:32:16 crc kubenswrapper[5002]: I0930 12:32:16.151797 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/7151e9d9-4417-40bb-aac5-8f838065aa79-console-oauth-config\") pod \"7151e9d9-4417-40bb-aac5-8f838065aa79\" (UID: \"7151e9d9-4417-40bb-aac5-8f838065aa79\") " Sep 30 12:32:16 crc kubenswrapper[5002]: I0930 12:32:16.151841 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/7151e9d9-4417-40bb-aac5-8f838065aa79-trusted-ca-bundle\") pod \"7151e9d9-4417-40bb-aac5-8f838065aa79\" (UID: \"7151e9d9-4417-40bb-aac5-8f838065aa79\") " Sep 30 12:32:16 crc kubenswrapper[5002]: I0930 12:32:16.151882 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/7151e9d9-4417-40bb-aac5-8f838065aa79-console-config\") pod \"7151e9d9-4417-40bb-aac5-8f838065aa79\" (UID: \"7151e9d9-4417-40bb-aac5-8f838065aa79\") " Sep 30 12:32:16 crc kubenswrapper[5002]: I0930 12:32:16.151911 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/7151e9d9-4417-40bb-aac5-8f838065aa79-oauth-serving-cert\") pod \"7151e9d9-4417-40bb-aac5-8f838065aa79\" (UID: \"7151e9d9-4417-40bb-aac5-8f838065aa79\") " Sep 30 12:32:16 crc kubenswrapper[5002]: I0930 12:32:16.151999 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/7151e9d9-4417-40bb-aac5-8f838065aa79-service-ca\") pod \"7151e9d9-4417-40bb-aac5-8f838065aa79\" (UID: \"7151e9d9-4417-40bb-aac5-8f838065aa79\") " Sep 30 12:32:16 crc kubenswrapper[5002]: I0930 12:32:16.152028 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/7151e9d9-4417-40bb-aac5-8f838065aa79-console-serving-cert\") pod \"7151e9d9-4417-40bb-aac5-8f838065aa79\" (UID: \"7151e9d9-4417-40bb-aac5-8f838065aa79\") " Sep 30 12:32:16 crc kubenswrapper[5002]: I0930 12:32:16.153012 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7151e9d9-4417-40bb-aac5-8f838065aa79-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "7151e9d9-4417-40bb-aac5-8f838065aa79" (UID: "7151e9d9-4417-40bb-aac5-8f838065aa79"). InnerVolumeSpecName "trusted-ca-bundle". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 12:32:16 crc kubenswrapper[5002]: I0930 12:32:16.153081 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7151e9d9-4417-40bb-aac5-8f838065aa79-service-ca" (OuterVolumeSpecName: "service-ca") pod "7151e9d9-4417-40bb-aac5-8f838065aa79" (UID: "7151e9d9-4417-40bb-aac5-8f838065aa79"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 12:32:16 crc kubenswrapper[5002]: I0930 12:32:16.153185 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7151e9d9-4417-40bb-aac5-8f838065aa79-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "7151e9d9-4417-40bb-aac5-8f838065aa79" (UID: "7151e9d9-4417-40bb-aac5-8f838065aa79"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 12:32:16 crc kubenswrapper[5002]: I0930 12:32:16.153194 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7151e9d9-4417-40bb-aac5-8f838065aa79-console-config" (OuterVolumeSpecName: "console-config") pod "7151e9d9-4417-40bb-aac5-8f838065aa79" (UID: "7151e9d9-4417-40bb-aac5-8f838065aa79"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 12:32:16 crc kubenswrapper[5002]: I0930 12:32:16.157534 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7151e9d9-4417-40bb-aac5-8f838065aa79-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "7151e9d9-4417-40bb-aac5-8f838065aa79" (UID: "7151e9d9-4417-40bb-aac5-8f838065aa79"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:32:16 crc kubenswrapper[5002]: I0930 12:32:16.158069 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7151e9d9-4417-40bb-aac5-8f838065aa79-kube-api-access-qc7nq" (OuterVolumeSpecName: "kube-api-access-qc7nq") pod "7151e9d9-4417-40bb-aac5-8f838065aa79" (UID: "7151e9d9-4417-40bb-aac5-8f838065aa79"). InnerVolumeSpecName "kube-api-access-qc7nq". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 12:32:16 crc kubenswrapper[5002]: I0930 12:32:16.158530 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7151e9d9-4417-40bb-aac5-8f838065aa79-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "7151e9d9-4417-40bb-aac5-8f838065aa79" (UID: "7151e9d9-4417-40bb-aac5-8f838065aa79"). InnerVolumeSpecName "console-oauth-config". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:32:16 crc kubenswrapper[5002]: I0930 12:32:16.253001 5002 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/7151e9d9-4417-40bb-aac5-8f838065aa79-service-ca\") on node \"crc\" DevicePath \"\"" Sep 30 12:32:16 crc kubenswrapper[5002]: I0930 12:32:16.253035 5002 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/7151e9d9-4417-40bb-aac5-8f838065aa79-console-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 30 12:32:16 crc kubenswrapper[5002]: I0930 12:32:16.253049 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qc7nq\" (UniqueName: \"kubernetes.io/projected/7151e9d9-4417-40bb-aac5-8f838065aa79-kube-api-access-qc7nq\") on node \"crc\" DevicePath \"\"" Sep 30 12:32:16 crc kubenswrapper[5002]: I0930 12:32:16.253059 5002 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/7151e9d9-4417-40bb-aac5-8f838065aa79-console-oauth-config\") on node \"crc\" DevicePath \"\"" Sep 30 12:32:16 crc kubenswrapper[5002]: I0930 12:32:16.253071 5002 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/7151e9d9-4417-40bb-aac5-8f838065aa79-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 12:32:16 crc kubenswrapper[5002]: I0930 12:32:16.253081 5002 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/7151e9d9-4417-40bb-aac5-8f838065aa79-console-config\") on node \"crc\" DevicePath \"\"" Sep 30 12:32:16 crc kubenswrapper[5002]: I0930 12:32:16.253089 5002 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/7151e9d9-4417-40bb-aac5-8f838065aa79-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 30 12:32:16 crc kubenswrapper[5002]: I0930 12:32:16.702896 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-79sft_7151e9d9-4417-40bb-aac5-8f838065aa79/console/0.log" Sep 30 12:32:16 crc kubenswrapper[5002]: I0930 12:32:16.702949 5002 generic.go:334] "Generic (PLEG): container finished" podID="7151e9d9-4417-40bb-aac5-8f838065aa79" containerID="db9f35aac93001499f179a91ffed4dd41ca88574dcf65e4ed661bc61bc833a56" exitCode=2 Sep 30 12:32:16 crc kubenswrapper[5002]: I0930 12:32:16.702981 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-79sft" event={"ID":"7151e9d9-4417-40bb-aac5-8f838065aa79","Type":"ContainerDied","Data":"db9f35aac93001499f179a91ffed4dd41ca88574dcf65e4ed661bc61bc833a56"} Sep 30 12:32:16 crc kubenswrapper[5002]: I0930 12:32:16.703010 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-79sft" event={"ID":"7151e9d9-4417-40bb-aac5-8f838065aa79","Type":"ContainerDied","Data":"a1c29cdfaebcba48a1a0b36cc0b0389612b116f5259c975ec5730e77fa8cfe19"} Sep 30 12:32:16 crc kubenswrapper[5002]: I0930 12:32:16.703021 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-f9d7485db-79sft" Sep 30 12:32:16 crc kubenswrapper[5002]: I0930 12:32:16.703030 5002 scope.go:117] "RemoveContainer" containerID="db9f35aac93001499f179a91ffed4dd41ca88574dcf65e4ed661bc61bc833a56" Sep 30 12:32:16 crc kubenswrapper[5002]: I0930 12:32:16.723821 5002 scope.go:117] "RemoveContainer" containerID="db9f35aac93001499f179a91ffed4dd41ca88574dcf65e4ed661bc61bc833a56" Sep 30 12:32:16 crc kubenswrapper[5002]: E0930 12:32:16.725548 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"db9f35aac93001499f179a91ffed4dd41ca88574dcf65e4ed661bc61bc833a56\": container with ID starting with db9f35aac93001499f179a91ffed4dd41ca88574dcf65e4ed661bc61bc833a56 not found: ID does not exist" containerID="db9f35aac93001499f179a91ffed4dd41ca88574dcf65e4ed661bc61bc833a56" Sep 30 12:32:16 crc kubenswrapper[5002]: I0930 12:32:16.725617 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"db9f35aac93001499f179a91ffed4dd41ca88574dcf65e4ed661bc61bc833a56"} err="failed to get container status \"db9f35aac93001499f179a91ffed4dd41ca88574dcf65e4ed661bc61bc833a56\": rpc error: code = NotFound desc = could not find container \"db9f35aac93001499f179a91ffed4dd41ca88574dcf65e4ed661bc61bc833a56\": container with ID starting with db9f35aac93001499f179a91ffed4dd41ca88574dcf65e4ed661bc61bc833a56 not found: ID does not exist" Sep 30 12:32:16 crc kubenswrapper[5002]: I0930 12:32:16.736427 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-79sft"] Sep 30 12:32:16 crc kubenswrapper[5002]: I0930 12:32:16.742157 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-console/console-f9d7485db-79sft"] Sep 30 12:32:17 crc kubenswrapper[5002]: I0930 12:32:17.713365 5002 generic.go:334] "Generic (PLEG): container finished" podID="9da270d3-58ec-44e6-acaa-3cb86fbc2047" containerID="3cc0154d40eb470fa491d1f3e198c5e03ab632c750cc597d8662237b5df7ae7c" exitCode=0 Sep 30 12:32:17 crc kubenswrapper[5002]: I0930 12:32:17.713418 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96bqfq5" event={"ID":"9da270d3-58ec-44e6-acaa-3cb86fbc2047","Type":"ContainerDied","Data":"3cc0154d40eb470fa491d1f3e198c5e03ab632c750cc597d8662237b5df7ae7c"} Sep 30 12:32:18 crc kubenswrapper[5002]: I0930 12:32:18.692964 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7151e9d9-4417-40bb-aac5-8f838065aa79" path="/var/lib/kubelet/pods/7151e9d9-4417-40bb-aac5-8f838065aa79/volumes" Sep 30 12:32:18 crc kubenswrapper[5002]: I0930 12:32:18.726222 5002 generic.go:334] "Generic (PLEG): container finished" podID="9da270d3-58ec-44e6-acaa-3cb86fbc2047" containerID="21b32f17eab4ec1e07aa3d9cfa58ac85df6af17a90f04bd29dd7596f03efb1ca" exitCode=0 Sep 30 12:32:18 crc kubenswrapper[5002]: I0930 12:32:18.726263 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96bqfq5" event={"ID":"9da270d3-58ec-44e6-acaa-3cb86fbc2047","Type":"ContainerDied","Data":"21b32f17eab4ec1e07aa3d9cfa58ac85df6af17a90f04bd29dd7596f03efb1ca"} Sep 30 12:32:20 crc kubenswrapper[5002]: I0930 12:32:20.047639 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96bqfq5" Sep 30 12:32:20 crc kubenswrapper[5002]: I0930 12:32:20.208275 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mnpmw\" (UniqueName: \"kubernetes.io/projected/9da270d3-58ec-44e6-acaa-3cb86fbc2047-kube-api-access-mnpmw\") pod \"9da270d3-58ec-44e6-acaa-3cb86fbc2047\" (UID: \"9da270d3-58ec-44e6-acaa-3cb86fbc2047\") " Sep 30 12:32:20 crc kubenswrapper[5002]: I0930 12:32:20.208433 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/9da270d3-58ec-44e6-acaa-3cb86fbc2047-util\") pod \"9da270d3-58ec-44e6-acaa-3cb86fbc2047\" (UID: \"9da270d3-58ec-44e6-acaa-3cb86fbc2047\") " Sep 30 12:32:20 crc kubenswrapper[5002]: I0930 12:32:20.208517 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/9da270d3-58ec-44e6-acaa-3cb86fbc2047-bundle\") pod \"9da270d3-58ec-44e6-acaa-3cb86fbc2047\" (UID: \"9da270d3-58ec-44e6-acaa-3cb86fbc2047\") " Sep 30 12:32:20 crc kubenswrapper[5002]: I0930 12:32:20.210339 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9da270d3-58ec-44e6-acaa-3cb86fbc2047-bundle" (OuterVolumeSpecName: "bundle") pod "9da270d3-58ec-44e6-acaa-3cb86fbc2047" (UID: "9da270d3-58ec-44e6-acaa-3cb86fbc2047"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 12:32:20 crc kubenswrapper[5002]: I0930 12:32:20.215085 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9da270d3-58ec-44e6-acaa-3cb86fbc2047-kube-api-access-mnpmw" (OuterVolumeSpecName: "kube-api-access-mnpmw") pod "9da270d3-58ec-44e6-acaa-3cb86fbc2047" (UID: "9da270d3-58ec-44e6-acaa-3cb86fbc2047"). InnerVolumeSpecName "kube-api-access-mnpmw". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 12:32:20 crc kubenswrapper[5002]: I0930 12:32:20.224187 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9da270d3-58ec-44e6-acaa-3cb86fbc2047-util" (OuterVolumeSpecName: "util") pod "9da270d3-58ec-44e6-acaa-3cb86fbc2047" (UID: "9da270d3-58ec-44e6-acaa-3cb86fbc2047"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 12:32:20 crc kubenswrapper[5002]: I0930 12:32:20.310249 5002 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/9da270d3-58ec-44e6-acaa-3cb86fbc2047-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 12:32:20 crc kubenswrapper[5002]: I0930 12:32:20.310552 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mnpmw\" (UniqueName: \"kubernetes.io/projected/9da270d3-58ec-44e6-acaa-3cb86fbc2047-kube-api-access-mnpmw\") on node \"crc\" DevicePath \"\"" Sep 30 12:32:20 crc kubenswrapper[5002]: I0930 12:32:20.310645 5002 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/9da270d3-58ec-44e6-acaa-3cb86fbc2047-util\") on node \"crc\" DevicePath \"\"" Sep 30 12:32:20 crc kubenswrapper[5002]: I0930 12:32:20.742580 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96bqfq5" event={"ID":"9da270d3-58ec-44e6-acaa-3cb86fbc2047","Type":"ContainerDied","Data":"652ecb8eb9f3600bdb497a9ea0e0e8178261bc41f3846426b48b67631fca404d"} Sep 30 12:32:20 crc kubenswrapper[5002]: I0930 12:32:20.742626 5002 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="652ecb8eb9f3600bdb497a9ea0e0e8178261bc41f3846426b48b67631fca404d" Sep 30 12:32:20 crc kubenswrapper[5002]: I0930 12:32:20.742644 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96bqfq5" Sep 30 12:32:31 crc kubenswrapper[5002]: I0930 12:32:31.241826 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-controller-manager-576d687654-bw9lz"] Sep 30 12:32:31 crc kubenswrapper[5002]: E0930 12:32:31.242442 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9da270d3-58ec-44e6-acaa-3cb86fbc2047" containerName="extract" Sep 30 12:32:31 crc kubenswrapper[5002]: I0930 12:32:31.242454 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="9da270d3-58ec-44e6-acaa-3cb86fbc2047" containerName="extract" Sep 30 12:32:31 crc kubenswrapper[5002]: E0930 12:32:31.242463 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9da270d3-58ec-44e6-acaa-3cb86fbc2047" containerName="util" Sep 30 12:32:31 crc kubenswrapper[5002]: I0930 12:32:31.242507 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="9da270d3-58ec-44e6-acaa-3cb86fbc2047" containerName="util" Sep 30 12:32:31 crc kubenswrapper[5002]: E0930 12:32:31.242519 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7151e9d9-4417-40bb-aac5-8f838065aa79" containerName="console" Sep 30 12:32:31 crc kubenswrapper[5002]: I0930 12:32:31.242525 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="7151e9d9-4417-40bb-aac5-8f838065aa79" containerName="console" Sep 30 12:32:31 crc kubenswrapper[5002]: E0930 12:32:31.242540 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9da270d3-58ec-44e6-acaa-3cb86fbc2047" containerName="pull" Sep 30 12:32:31 crc kubenswrapper[5002]: I0930 12:32:31.242546 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="9da270d3-58ec-44e6-acaa-3cb86fbc2047" containerName="pull" Sep 30 12:32:31 crc kubenswrapper[5002]: I0930 12:32:31.242630 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="7151e9d9-4417-40bb-aac5-8f838065aa79" containerName="console" Sep 
30 12:32:31 crc kubenswrapper[5002]: I0930 12:32:31.242643 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="9da270d3-58ec-44e6-acaa-3cb86fbc2047" containerName="extract" Sep 30 12:32:31 crc kubenswrapper[5002]: I0930 12:32:31.243061 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-576d687654-bw9lz" Sep 30 12:32:31 crc kubenswrapper[5002]: I0930 12:32:31.246487 5002 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-cert" Sep 30 12:32:31 crc kubenswrapper[5002]: I0930 12:32:31.246769 5002 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-controller-manager-service-cert" Sep 30 12:32:31 crc kubenswrapper[5002]: I0930 12:32:31.247500 5002 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"manager-account-dockercfg-2xjwn" Sep 30 12:32:31 crc kubenswrapper[5002]: I0930 12:32:31.247551 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"openshift-service-ca.crt" Sep 30 12:32:31 crc kubenswrapper[5002]: I0930 12:32:31.247828 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"kube-root-ca.crt" Sep 30 12:32:31 crc kubenswrapper[5002]: I0930 12:32:31.265298 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-576d687654-bw9lz"] Sep 30 12:32:31 crc kubenswrapper[5002]: I0930 12:32:31.345109 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/27c28a83-93f3-40e5-9430-e95593fb9b70-apiservice-cert\") pod \"metallb-operator-controller-manager-576d687654-bw9lz\" (UID: \"27c28a83-93f3-40e5-9430-e95593fb9b70\") " pod="metallb-system/metallb-operator-controller-manager-576d687654-bw9lz" Sep 30 12:32:31 crc kubenswrapper[5002]: I0930 12:32:31.345222 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/27c28a83-93f3-40e5-9430-e95593fb9b70-webhook-cert\") pod \"metallb-operator-controller-manager-576d687654-bw9lz\" (UID: \"27c28a83-93f3-40e5-9430-e95593fb9b70\") " pod="metallb-system/metallb-operator-controller-manager-576d687654-bw9lz" Sep 30 12:32:31 crc kubenswrapper[5002]: I0930 12:32:31.345251 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l4xw7\" (UniqueName: \"kubernetes.io/projected/27c28a83-93f3-40e5-9430-e95593fb9b70-kube-api-access-l4xw7\") pod \"metallb-operator-controller-manager-576d687654-bw9lz\" (UID: \"27c28a83-93f3-40e5-9430-e95593fb9b70\") " pod="metallb-system/metallb-operator-controller-manager-576d687654-bw9lz" Sep 30 12:32:31 crc kubenswrapper[5002]: I0930 12:32:31.446985 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/27c28a83-93f3-40e5-9430-e95593fb9b70-webhook-cert\") pod \"metallb-operator-controller-manager-576d687654-bw9lz\" (UID: \"27c28a83-93f3-40e5-9430-e95593fb9b70\") " pod="metallb-system/metallb-operator-controller-manager-576d687654-bw9lz" Sep 30 12:32:31 crc kubenswrapper[5002]: I0930 12:32:31.447061 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l4xw7\" (UniqueName: 
\"kubernetes.io/projected/27c28a83-93f3-40e5-9430-e95593fb9b70-kube-api-access-l4xw7\") pod \"metallb-operator-controller-manager-576d687654-bw9lz\" (UID: \"27c28a83-93f3-40e5-9430-e95593fb9b70\") " pod="metallb-system/metallb-operator-controller-manager-576d687654-bw9lz" Sep 30 12:32:31 crc kubenswrapper[5002]: I0930 12:32:31.447122 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/27c28a83-93f3-40e5-9430-e95593fb9b70-apiservice-cert\") pod \"metallb-operator-controller-manager-576d687654-bw9lz\" (UID: \"27c28a83-93f3-40e5-9430-e95593fb9b70\") " pod="metallb-system/metallb-operator-controller-manager-576d687654-bw9lz" Sep 30 12:32:31 crc kubenswrapper[5002]: I0930 12:32:31.453317 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/27c28a83-93f3-40e5-9430-e95593fb9b70-apiservice-cert\") pod \"metallb-operator-controller-manager-576d687654-bw9lz\" (UID: \"27c28a83-93f3-40e5-9430-e95593fb9b70\") " pod="metallb-system/metallb-operator-controller-manager-576d687654-bw9lz" Sep 30 12:32:31 crc kubenswrapper[5002]: I0930 12:32:31.468540 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/27c28a83-93f3-40e5-9430-e95593fb9b70-webhook-cert\") pod \"metallb-operator-controller-manager-576d687654-bw9lz\" (UID: \"27c28a83-93f3-40e5-9430-e95593fb9b70\") " pod="metallb-system/metallb-operator-controller-manager-576d687654-bw9lz" Sep 30 12:32:31 crc kubenswrapper[5002]: I0930 12:32:31.469015 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l4xw7\" (UniqueName: \"kubernetes.io/projected/27c28a83-93f3-40e5-9430-e95593fb9b70-kube-api-access-l4xw7\") pod \"metallb-operator-controller-manager-576d687654-bw9lz\" (UID: \"27c28a83-93f3-40e5-9430-e95593fb9b70\") " pod="metallb-system/metallb-operator-controller-manager-576d687654-bw9lz" Sep 30 12:32:31 crc kubenswrapper[5002]: I0930 12:32:31.562199 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-576d687654-bw9lz" Sep 30 12:32:31 crc kubenswrapper[5002]: I0930 12:32:31.563191 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-webhook-server-6dd5844d8b-5jxxh"] Sep 30 12:32:31 crc kubenswrapper[5002]: I0930 12:32:31.564206 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-6dd5844d8b-5jxxh" Sep 30 12:32:31 crc kubenswrapper[5002]: I0930 12:32:31.567740 5002 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-webhook-cert" Sep 30 12:32:31 crc kubenswrapper[5002]: I0930 12:32:31.567810 5002 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-dockercfg-zzq2j" Sep 30 12:32:31 crc kubenswrapper[5002]: I0930 12:32:31.567928 5002 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-service-cert" Sep 30 12:32:31 crc kubenswrapper[5002]: I0930 12:32:31.572713 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-6dd5844d8b-5jxxh"] Sep 30 12:32:31 crc kubenswrapper[5002]: I0930 12:32:31.750645 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/6e455bb5-7fde-4e6e-a287-e053991325b2-apiservice-cert\") pod \"metallb-operator-webhook-server-6dd5844d8b-5jxxh\" (UID: \"6e455bb5-7fde-4e6e-a287-e053991325b2\") " pod="metallb-system/metallb-operator-webhook-server-6dd5844d8b-5jxxh" Sep 30 12:32:31 crc kubenswrapper[5002]: I0930 12:32:31.750747 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/6e455bb5-7fde-4e6e-a287-e053991325b2-webhook-cert\") pod \"metallb-operator-webhook-server-6dd5844d8b-5jxxh\" (UID: \"6e455bb5-7fde-4e6e-a287-e053991325b2\") " pod="metallb-system/metallb-operator-webhook-server-6dd5844d8b-5jxxh" Sep 30 12:32:31 crc kubenswrapper[5002]: I0930 12:32:31.750772 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rrtvq\" (UniqueName: \"kubernetes.io/projected/6e455bb5-7fde-4e6e-a287-e053991325b2-kube-api-access-rrtvq\") pod \"metallb-operator-webhook-server-6dd5844d8b-5jxxh\" (UID: \"6e455bb5-7fde-4e6e-a287-e053991325b2\") " pod="metallb-system/metallb-operator-webhook-server-6dd5844d8b-5jxxh" Sep 30 12:32:31 crc kubenswrapper[5002]: I0930 12:32:31.852134 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/6e455bb5-7fde-4e6e-a287-e053991325b2-apiservice-cert\") pod \"metallb-operator-webhook-server-6dd5844d8b-5jxxh\" (UID: \"6e455bb5-7fde-4e6e-a287-e053991325b2\") " pod="metallb-system/metallb-operator-webhook-server-6dd5844d8b-5jxxh" Sep 30 12:32:31 crc kubenswrapper[5002]: I0930 12:32:31.852222 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/6e455bb5-7fde-4e6e-a287-e053991325b2-webhook-cert\") pod \"metallb-operator-webhook-server-6dd5844d8b-5jxxh\" (UID: \"6e455bb5-7fde-4e6e-a287-e053991325b2\") " pod="metallb-system/metallb-operator-webhook-server-6dd5844d8b-5jxxh" Sep 30 12:32:31 crc kubenswrapper[5002]: I0930 12:32:31.852242 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rrtvq\" (UniqueName: \"kubernetes.io/projected/6e455bb5-7fde-4e6e-a287-e053991325b2-kube-api-access-rrtvq\") pod \"metallb-operator-webhook-server-6dd5844d8b-5jxxh\" (UID: \"6e455bb5-7fde-4e6e-a287-e053991325b2\") " pod="metallb-system/metallb-operator-webhook-server-6dd5844d8b-5jxxh" Sep 30 12:32:31 crc kubenswrapper[5002]: I0930 
12:32:31.857440 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/6e455bb5-7fde-4e6e-a287-e053991325b2-webhook-cert\") pod \"metallb-operator-webhook-server-6dd5844d8b-5jxxh\" (UID: \"6e455bb5-7fde-4e6e-a287-e053991325b2\") " pod="metallb-system/metallb-operator-webhook-server-6dd5844d8b-5jxxh" Sep 30 12:32:31 crc kubenswrapper[5002]: I0930 12:32:31.860738 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/6e455bb5-7fde-4e6e-a287-e053991325b2-apiservice-cert\") pod \"metallb-operator-webhook-server-6dd5844d8b-5jxxh\" (UID: \"6e455bb5-7fde-4e6e-a287-e053991325b2\") " pod="metallb-system/metallb-operator-webhook-server-6dd5844d8b-5jxxh" Sep 30 12:32:31 crc kubenswrapper[5002]: I0930 12:32:31.874218 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rrtvq\" (UniqueName: \"kubernetes.io/projected/6e455bb5-7fde-4e6e-a287-e053991325b2-kube-api-access-rrtvq\") pod \"metallb-operator-webhook-server-6dd5844d8b-5jxxh\" (UID: \"6e455bb5-7fde-4e6e-a287-e053991325b2\") " pod="metallb-system/metallb-operator-webhook-server-6dd5844d8b-5jxxh" Sep 30 12:32:31 crc kubenswrapper[5002]: I0930 12:32:31.891111 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-6dd5844d8b-5jxxh" Sep 30 12:32:32 crc kubenswrapper[5002]: I0930 12:32:32.089161 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-576d687654-bw9lz"] Sep 30 12:32:32 crc kubenswrapper[5002]: I0930 12:32:32.102620 5002 patch_prober.go:28] interesting pod/machine-config-daemon-ncbb5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 12:32:32 crc kubenswrapper[5002]: I0930 12:32:32.102668 5002 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" podUID="341a55c6-78d3-4fa2-8f47-b56fd41fa1c1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 12:32:32 crc kubenswrapper[5002]: I0930 12:32:32.240407 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-6dd5844d8b-5jxxh"] Sep 30 12:32:32 crc kubenswrapper[5002]: W0930 12:32:32.245538 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6e455bb5_7fde_4e6e_a287_e053991325b2.slice/crio-b1ac4e5307f37549978b6cab9363c768773b0957e808dd87a8183431bae861b5 WatchSource:0}: Error finding container b1ac4e5307f37549978b6cab9363c768773b0957e808dd87a8183431bae861b5: Status 404 returned error can't find the container with id b1ac4e5307f37549978b6cab9363c768773b0957e808dd87a8183431bae861b5 Sep 30 12:32:32 crc kubenswrapper[5002]: I0930 12:32:32.809672 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-576d687654-bw9lz" event={"ID":"27c28a83-93f3-40e5-9430-e95593fb9b70","Type":"ContainerStarted","Data":"e4fc81143b09406b2b5235a12eaf9e16549ad5af20e4518527709c95658f6406"} Sep 30 12:32:32 crc kubenswrapper[5002]: I0930 12:32:32.812781 5002 kubelet.go:2453] "SyncLoop 
(PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-6dd5844d8b-5jxxh" event={"ID":"6e455bb5-7fde-4e6e-a287-e053991325b2","Type":"ContainerStarted","Data":"b1ac4e5307f37549978b6cab9363c768773b0957e808dd87a8183431bae861b5"} Sep 30 12:32:37 crc kubenswrapper[5002]: I0930 12:32:37.841160 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-6dd5844d8b-5jxxh" event={"ID":"6e455bb5-7fde-4e6e-a287-e053991325b2","Type":"ContainerStarted","Data":"f931b056caba189fd1ee80a9880069ec5d7b727bb7879c1f243072cb97550b48"} Sep 30 12:32:37 crc kubenswrapper[5002]: I0930 12:32:37.841757 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-webhook-server-6dd5844d8b-5jxxh" Sep 30 12:32:37 crc kubenswrapper[5002]: I0930 12:32:37.843143 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-576d687654-bw9lz" event={"ID":"27c28a83-93f3-40e5-9430-e95593fb9b70","Type":"ContainerStarted","Data":"01e09af719c07cc737c0d8f5cb74d693232d783c70451423ec478992b94df2af"} Sep 30 12:32:37 crc kubenswrapper[5002]: I0930 12:32:37.843272 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-controller-manager-576d687654-bw9lz" Sep 30 12:32:37 crc kubenswrapper[5002]: I0930 12:32:37.865294 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-webhook-server-6dd5844d8b-5jxxh" podStartSLOduration=1.9172188220000002 podStartE2EDuration="6.8652685s" podCreationTimestamp="2025-09-30 12:32:31 +0000 UTC" firstStartedPulling="2025-09-30 12:32:32.248751258 +0000 UTC m=+726.498433404" lastFinishedPulling="2025-09-30 12:32:37.196800936 +0000 UTC m=+731.446483082" observedRunningTime="2025-09-30 12:32:37.861359275 +0000 UTC m=+732.111041431" watchObservedRunningTime="2025-09-30 12:32:37.8652685 +0000 UTC m=+732.114950656" Sep 30 12:32:37 crc kubenswrapper[5002]: I0930 12:32:37.910355 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-controller-manager-576d687654-bw9lz" podStartSLOduration=1.827886063 podStartE2EDuration="6.910338535s" podCreationTimestamp="2025-09-30 12:32:31 +0000 UTC" firstStartedPulling="2025-09-30 12:32:32.107763238 +0000 UTC m=+726.357445384" lastFinishedPulling="2025-09-30 12:32:37.19021571 +0000 UTC m=+731.439897856" observedRunningTime="2025-09-30 12:32:37.906223065 +0000 UTC m=+732.155905221" watchObservedRunningTime="2025-09-30 12:32:37.910338535 +0000 UTC m=+732.160020691" Sep 30 12:32:51 crc kubenswrapper[5002]: I0930 12:32:51.896086 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-webhook-server-6dd5844d8b-5jxxh" Sep 30 12:32:52 crc kubenswrapper[5002]: I0930 12:32:52.634075 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-p582f"] Sep 30 12:32:52 crc kubenswrapper[5002]: I0930 12:32:52.634291 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-879f6c89f-p582f" podUID="11cba069-c2a4-4b6c-8e3e-38c76d27f20a" containerName="controller-manager" containerID="cri-o://65e3484e040003c7a1eeda15c15f9e3e3a72be6329f94876d35c259e81a3ce25" gracePeriod=30 Sep 30 12:32:52 crc kubenswrapper[5002]: I0930 12:32:52.711937 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-57t2m"] Sep 30 12:32:52 crc kubenswrapper[5002]: I0930 12:32:52.712519 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-57t2m" podUID="1c0ed680-87c2-438b-aca7-b9fa1c19d414" containerName="route-controller-manager" containerID="cri-o://0112a1ce6c04952e2d2116012abdb3d17afad7d81dc44c97efa2c65ac68abbaa" gracePeriod=30 Sep 30 12:32:53 crc kubenswrapper[5002]: I0930 12:32:53.766835 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-p582f" Sep 30 12:32:53 crc kubenswrapper[5002]: I0930 12:32:53.804269 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-54dc747764-rnsdd"] Sep 30 12:32:53 crc kubenswrapper[5002]: E0930 12:32:53.804596 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="11cba069-c2a4-4b6c-8e3e-38c76d27f20a" containerName="controller-manager" Sep 30 12:32:53 crc kubenswrapper[5002]: I0930 12:32:53.804619 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="11cba069-c2a4-4b6c-8e3e-38c76d27f20a" containerName="controller-manager" Sep 30 12:32:53 crc kubenswrapper[5002]: I0930 12:32:53.804757 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="11cba069-c2a4-4b6c-8e3e-38c76d27f20a" containerName="controller-manager" Sep 30 12:32:53 crc kubenswrapper[5002]: I0930 12:32:53.805220 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-54dc747764-rnsdd" Sep 30 12:32:53 crc kubenswrapper[5002]: I0930 12:32:53.856073 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-57t2m" Sep 30 12:32:53 crc kubenswrapper[5002]: I0930 12:32:53.861852 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-54dc747764-rnsdd"] Sep 30 12:32:53 crc kubenswrapper[5002]: I0930 12:32:53.934313 5002 generic.go:334] "Generic (PLEG): container finished" podID="11cba069-c2a4-4b6c-8e3e-38c76d27f20a" containerID="65e3484e040003c7a1eeda15c15f9e3e3a72be6329f94876d35c259e81a3ce25" exitCode=0 Sep 30 12:32:53 crc kubenswrapper[5002]: I0930 12:32:53.934377 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-p582f" event={"ID":"11cba069-c2a4-4b6c-8e3e-38c76d27f20a","Type":"ContainerDied","Data":"65e3484e040003c7a1eeda15c15f9e3e3a72be6329f94876d35c259e81a3ce25"} Sep 30 12:32:53 crc kubenswrapper[5002]: I0930 12:32:53.934403 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-p582f" event={"ID":"11cba069-c2a4-4b6c-8e3e-38c76d27f20a","Type":"ContainerDied","Data":"05f186dd2e8f8de6f2d12c0816835780e5703b1dab669f22d00fe43de3fa26ad"} Sep 30 12:32:53 crc kubenswrapper[5002]: I0930 12:32:53.934403 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-p582f" Sep 30 12:32:53 crc kubenswrapper[5002]: I0930 12:32:53.934419 5002 scope.go:117] "RemoveContainer" containerID="65e3484e040003c7a1eeda15c15f9e3e3a72be6329f94876d35c259e81a3ce25" Sep 30 12:32:53 crc kubenswrapper[5002]: I0930 12:32:53.935794 5002 generic.go:334] "Generic (PLEG): container finished" podID="1c0ed680-87c2-438b-aca7-b9fa1c19d414" containerID="0112a1ce6c04952e2d2116012abdb3d17afad7d81dc44c97efa2c65ac68abbaa" exitCode=0 Sep 30 12:32:53 crc kubenswrapper[5002]: I0930 12:32:53.935828 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-57t2m" Sep 30 12:32:53 crc kubenswrapper[5002]: I0930 12:32:53.935837 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-57t2m" event={"ID":"1c0ed680-87c2-438b-aca7-b9fa1c19d414","Type":"ContainerDied","Data":"0112a1ce6c04952e2d2116012abdb3d17afad7d81dc44c97efa2c65ac68abbaa"} Sep 30 12:32:53 crc kubenswrapper[5002]: I0930 12:32:53.935860 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-57t2m" event={"ID":"1c0ed680-87c2-438b-aca7-b9fa1c19d414","Type":"ContainerDied","Data":"794aca28c140bc8dad657b302415dcdf625450a569c0d573f5a490373e1cf4ca"} Sep 30 12:32:53 crc kubenswrapper[5002]: I0930 12:32:53.943690 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/11cba069-c2a4-4b6c-8e3e-38c76d27f20a-client-ca\") pod \"11cba069-c2a4-4b6c-8e3e-38c76d27f20a\" (UID: \"11cba069-c2a4-4b6c-8e3e-38c76d27f20a\") " Sep 30 12:32:53 crc kubenswrapper[5002]: I0930 12:32:53.943748 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/11cba069-c2a4-4b6c-8e3e-38c76d27f20a-proxy-ca-bundles\") pod \"11cba069-c2a4-4b6c-8e3e-38c76d27f20a\" (UID: \"11cba069-c2a4-4b6c-8e3e-38c76d27f20a\") " Sep 30 12:32:53 crc kubenswrapper[5002]: I0930 12:32:53.943809 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1c0ed680-87c2-438b-aca7-b9fa1c19d414-config\") pod \"1c0ed680-87c2-438b-aca7-b9fa1c19d414\" (UID: \"1c0ed680-87c2-438b-aca7-b9fa1c19d414\") " Sep 30 12:32:53 crc kubenswrapper[5002]: I0930 12:32:53.943842 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/11cba069-c2a4-4b6c-8e3e-38c76d27f20a-config\") pod \"11cba069-c2a4-4b6c-8e3e-38c76d27f20a\" (UID: \"11cba069-c2a4-4b6c-8e3e-38c76d27f20a\") " Sep 30 12:32:53 crc kubenswrapper[5002]: I0930 12:32:53.943868 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1c0ed680-87c2-438b-aca7-b9fa1c19d414-serving-cert\") pod \"1c0ed680-87c2-438b-aca7-b9fa1c19d414\" (UID: \"1c0ed680-87c2-438b-aca7-b9fa1c19d414\") " Sep 30 12:32:53 crc kubenswrapper[5002]: I0930 12:32:53.943907 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hpnvt\" (UniqueName: \"kubernetes.io/projected/1c0ed680-87c2-438b-aca7-b9fa1c19d414-kube-api-access-hpnvt\") pod \"1c0ed680-87c2-438b-aca7-b9fa1c19d414\" (UID: \"1c0ed680-87c2-438b-aca7-b9fa1c19d414\") " Sep 
30 12:32:53 crc kubenswrapper[5002]: I0930 12:32:53.944004 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/11cba069-c2a4-4b6c-8e3e-38c76d27f20a-serving-cert\") pod \"11cba069-c2a4-4b6c-8e3e-38c76d27f20a\" (UID: \"11cba069-c2a4-4b6c-8e3e-38c76d27f20a\") " Sep 30 12:32:53 crc kubenswrapper[5002]: I0930 12:32:53.944025 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/1c0ed680-87c2-438b-aca7-b9fa1c19d414-client-ca\") pod \"1c0ed680-87c2-438b-aca7-b9fa1c19d414\" (UID: \"1c0ed680-87c2-438b-aca7-b9fa1c19d414\") " Sep 30 12:32:53 crc kubenswrapper[5002]: I0930 12:32:53.944048 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4c29m\" (UniqueName: \"kubernetes.io/projected/11cba069-c2a4-4b6c-8e3e-38c76d27f20a-kube-api-access-4c29m\") pod \"11cba069-c2a4-4b6c-8e3e-38c76d27f20a\" (UID: \"11cba069-c2a4-4b6c-8e3e-38c76d27f20a\") " Sep 30 12:32:53 crc kubenswrapper[5002]: I0930 12:32:53.944192 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7d0e2e49-da4f-44da-9257-8fc36c6bceec-client-ca\") pod \"controller-manager-54dc747764-rnsdd\" (UID: \"7d0e2e49-da4f-44da-9257-8fc36c6bceec\") " pod="openshift-controller-manager/controller-manager-54dc747764-rnsdd" Sep 30 12:32:53 crc kubenswrapper[5002]: I0930 12:32:53.944203 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/11cba069-c2a4-4b6c-8e3e-38c76d27f20a-client-ca" (OuterVolumeSpecName: "client-ca") pod "11cba069-c2a4-4b6c-8e3e-38c76d27f20a" (UID: "11cba069-c2a4-4b6c-8e3e-38c76d27f20a"). InnerVolumeSpecName "client-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 12:32:53 crc kubenswrapper[5002]: I0930 12:32:53.944241 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7d0e2e49-da4f-44da-9257-8fc36c6bceec-proxy-ca-bundles\") pod \"controller-manager-54dc747764-rnsdd\" (UID: \"7d0e2e49-da4f-44da-9257-8fc36c6bceec\") " pod="openshift-controller-manager/controller-manager-54dc747764-rnsdd" Sep 30 12:32:53 crc kubenswrapper[5002]: I0930 12:32:53.944295 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7d0e2e49-da4f-44da-9257-8fc36c6bceec-serving-cert\") pod \"controller-manager-54dc747764-rnsdd\" (UID: \"7d0e2e49-da4f-44da-9257-8fc36c6bceec\") " pod="openshift-controller-manager/controller-manager-54dc747764-rnsdd" Sep 30 12:32:53 crc kubenswrapper[5002]: I0930 12:32:53.944343 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9wbph\" (UniqueName: \"kubernetes.io/projected/7d0e2e49-da4f-44da-9257-8fc36c6bceec-kube-api-access-9wbph\") pod \"controller-manager-54dc747764-rnsdd\" (UID: \"7d0e2e49-da4f-44da-9257-8fc36c6bceec\") " pod="openshift-controller-manager/controller-manager-54dc747764-rnsdd" Sep 30 12:32:53 crc kubenswrapper[5002]: I0930 12:32:53.944405 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7d0e2e49-da4f-44da-9257-8fc36c6bceec-config\") pod \"controller-manager-54dc747764-rnsdd\" (UID: \"7d0e2e49-da4f-44da-9257-8fc36c6bceec\") " pod="openshift-controller-manager/controller-manager-54dc747764-rnsdd" Sep 30 12:32:53 crc kubenswrapper[5002]: I0930 12:32:53.944469 5002 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/11cba069-c2a4-4b6c-8e3e-38c76d27f20a-client-ca\") on node \"crc\" DevicePath \"\"" Sep 30 12:32:53 crc kubenswrapper[5002]: I0930 12:32:53.944923 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/11cba069-c2a4-4b6c-8e3e-38c76d27f20a-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "11cba069-c2a4-4b6c-8e3e-38c76d27f20a" (UID: "11cba069-c2a4-4b6c-8e3e-38c76d27f20a"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 12:32:53 crc kubenswrapper[5002]: I0930 12:32:53.945221 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/11cba069-c2a4-4b6c-8e3e-38c76d27f20a-config" (OuterVolumeSpecName: "config") pod "11cba069-c2a4-4b6c-8e3e-38c76d27f20a" (UID: "11cba069-c2a4-4b6c-8e3e-38c76d27f20a"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 12:32:53 crc kubenswrapper[5002]: I0930 12:32:53.945629 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1c0ed680-87c2-438b-aca7-b9fa1c19d414-client-ca" (OuterVolumeSpecName: "client-ca") pod "1c0ed680-87c2-438b-aca7-b9fa1c19d414" (UID: "1c0ed680-87c2-438b-aca7-b9fa1c19d414"). InnerVolumeSpecName "client-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 12:32:53 crc kubenswrapper[5002]: I0930 12:32:53.945717 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1c0ed680-87c2-438b-aca7-b9fa1c19d414-config" (OuterVolumeSpecName: "config") pod "1c0ed680-87c2-438b-aca7-b9fa1c19d414" (UID: "1c0ed680-87c2-438b-aca7-b9fa1c19d414"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 12:32:53 crc kubenswrapper[5002]: I0930 12:32:53.949301 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/11cba069-c2a4-4b6c-8e3e-38c76d27f20a-kube-api-access-4c29m" (OuterVolumeSpecName: "kube-api-access-4c29m") pod "11cba069-c2a4-4b6c-8e3e-38c76d27f20a" (UID: "11cba069-c2a4-4b6c-8e3e-38c76d27f20a"). InnerVolumeSpecName "kube-api-access-4c29m". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 12:32:53 crc kubenswrapper[5002]: I0930 12:32:53.949393 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1c0ed680-87c2-438b-aca7-b9fa1c19d414-kube-api-access-hpnvt" (OuterVolumeSpecName: "kube-api-access-hpnvt") pod "1c0ed680-87c2-438b-aca7-b9fa1c19d414" (UID: "1c0ed680-87c2-438b-aca7-b9fa1c19d414"). InnerVolumeSpecName "kube-api-access-hpnvt". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 12:32:53 crc kubenswrapper[5002]: I0930 12:32:53.949114 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1c0ed680-87c2-438b-aca7-b9fa1c19d414-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1c0ed680-87c2-438b-aca7-b9fa1c19d414" (UID: "1c0ed680-87c2-438b-aca7-b9fa1c19d414"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:32:53 crc kubenswrapper[5002]: I0930 12:32:53.950097 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/11cba069-c2a4-4b6c-8e3e-38c76d27f20a-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "11cba069-c2a4-4b6c-8e3e-38c76d27f20a" (UID: "11cba069-c2a4-4b6c-8e3e-38c76d27f20a"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:32:53 crc kubenswrapper[5002]: I0930 12:32:53.950686 5002 scope.go:117] "RemoveContainer" containerID="65e3484e040003c7a1eeda15c15f9e3e3a72be6329f94876d35c259e81a3ce25" Sep 30 12:32:53 crc kubenswrapper[5002]: E0930 12:32:53.950968 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"65e3484e040003c7a1eeda15c15f9e3e3a72be6329f94876d35c259e81a3ce25\": container with ID starting with 65e3484e040003c7a1eeda15c15f9e3e3a72be6329f94876d35c259e81a3ce25 not found: ID does not exist" containerID="65e3484e040003c7a1eeda15c15f9e3e3a72be6329f94876d35c259e81a3ce25" Sep 30 12:32:53 crc kubenswrapper[5002]: I0930 12:32:53.950995 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"65e3484e040003c7a1eeda15c15f9e3e3a72be6329f94876d35c259e81a3ce25"} err="failed to get container status \"65e3484e040003c7a1eeda15c15f9e3e3a72be6329f94876d35c259e81a3ce25\": rpc error: code = NotFound desc = could not find container \"65e3484e040003c7a1eeda15c15f9e3e3a72be6329f94876d35c259e81a3ce25\": container with ID starting with 65e3484e040003c7a1eeda15c15f9e3e3a72be6329f94876d35c259e81a3ce25 not found: ID does not exist" Sep 30 12:32:53 crc kubenswrapper[5002]: I0930 12:32:53.951022 5002 scope.go:117] "RemoveContainer" containerID="0112a1ce6c04952e2d2116012abdb3d17afad7d81dc44c97efa2c65ac68abbaa" Sep 30 12:32:53 crc kubenswrapper[5002]: I0930 12:32:53.986540 5002 scope.go:117] "RemoveContainer" containerID="0112a1ce6c04952e2d2116012abdb3d17afad7d81dc44c97efa2c65ac68abbaa" Sep 30 12:32:53 crc kubenswrapper[5002]: E0930 12:32:53.987157 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0112a1ce6c04952e2d2116012abdb3d17afad7d81dc44c97efa2c65ac68abbaa\": container with ID starting with 0112a1ce6c04952e2d2116012abdb3d17afad7d81dc44c97efa2c65ac68abbaa not found: ID does not exist" containerID="0112a1ce6c04952e2d2116012abdb3d17afad7d81dc44c97efa2c65ac68abbaa" Sep 30 12:32:53 crc kubenswrapper[5002]: I0930 12:32:53.987224 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0112a1ce6c04952e2d2116012abdb3d17afad7d81dc44c97efa2c65ac68abbaa"} err="failed to get container status \"0112a1ce6c04952e2d2116012abdb3d17afad7d81dc44c97efa2c65ac68abbaa\": rpc error: code = NotFound desc = could not find container \"0112a1ce6c04952e2d2116012abdb3d17afad7d81dc44c97efa2c65ac68abbaa\": container with ID starting with 0112a1ce6c04952e2d2116012abdb3d17afad7d81dc44c97efa2c65ac68abbaa not found: ID does not exist" Sep 30 12:32:54 crc kubenswrapper[5002]: I0930 12:32:54.045584 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7d0e2e49-da4f-44da-9257-8fc36c6bceec-client-ca\") pod \"controller-manager-54dc747764-rnsdd\" (UID: \"7d0e2e49-da4f-44da-9257-8fc36c6bceec\") " pod="openshift-controller-manager/controller-manager-54dc747764-rnsdd" Sep 30 12:32:54 crc kubenswrapper[5002]: I0930 12:32:54.045641 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7d0e2e49-da4f-44da-9257-8fc36c6bceec-proxy-ca-bundles\") pod \"controller-manager-54dc747764-rnsdd\" (UID: \"7d0e2e49-da4f-44da-9257-8fc36c6bceec\") " pod="openshift-controller-manager/controller-manager-54dc747764-rnsdd" Sep 30 
12:32:54 crc kubenswrapper[5002]: I0930 12:32:54.045679 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7d0e2e49-da4f-44da-9257-8fc36c6bceec-serving-cert\") pod \"controller-manager-54dc747764-rnsdd\" (UID: \"7d0e2e49-da4f-44da-9257-8fc36c6bceec\") " pod="openshift-controller-manager/controller-manager-54dc747764-rnsdd"
Sep 30 12:32:54 crc kubenswrapper[5002]: I0930 12:32:54.045715 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9wbph\" (UniqueName: \"kubernetes.io/projected/7d0e2e49-da4f-44da-9257-8fc36c6bceec-kube-api-access-9wbph\") pod \"controller-manager-54dc747764-rnsdd\" (UID: \"7d0e2e49-da4f-44da-9257-8fc36c6bceec\") " pod="openshift-controller-manager/controller-manager-54dc747764-rnsdd"
Sep 30 12:32:54 crc kubenswrapper[5002]: I0930 12:32:54.045738 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7d0e2e49-da4f-44da-9257-8fc36c6bceec-config\") pod \"controller-manager-54dc747764-rnsdd\" (UID: \"7d0e2e49-da4f-44da-9257-8fc36c6bceec\") " pod="openshift-controller-manager/controller-manager-54dc747764-rnsdd"
Sep 30 12:32:54 crc kubenswrapper[5002]: I0930 12:32:54.045776 5002 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/11cba069-c2a4-4b6c-8e3e-38c76d27f20a-serving-cert\") on node \"crc\" DevicePath \"\""
Sep 30 12:32:54 crc kubenswrapper[5002]: I0930 12:32:54.045788 5002 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/1c0ed680-87c2-438b-aca7-b9fa1c19d414-client-ca\") on node \"crc\" DevicePath \"\""
Sep 30 12:32:54 crc kubenswrapper[5002]: I0930 12:32:54.045796 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4c29m\" (UniqueName: \"kubernetes.io/projected/11cba069-c2a4-4b6c-8e3e-38c76d27f20a-kube-api-access-4c29m\") on node \"crc\" DevicePath \"\""
Sep 30 12:32:54 crc kubenswrapper[5002]: I0930 12:32:54.046067 5002 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/11cba069-c2a4-4b6c-8e3e-38c76d27f20a-proxy-ca-bundles\") on node \"crc\" DevicePath \"\""
Sep 30 12:32:54 crc kubenswrapper[5002]: I0930 12:32:54.046080 5002 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1c0ed680-87c2-438b-aca7-b9fa1c19d414-config\") on node \"crc\" DevicePath \"\""
Sep 30 12:32:54 crc kubenswrapper[5002]: I0930 12:32:54.046090 5002 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/11cba069-c2a4-4b6c-8e3e-38c76d27f20a-config\") on node \"crc\" DevicePath \"\""
Sep 30 12:32:54 crc kubenswrapper[5002]: I0930 12:32:54.046098 5002 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1c0ed680-87c2-438b-aca7-b9fa1c19d414-serving-cert\") on node \"crc\" DevicePath \"\""
Sep 30 12:32:54 crc kubenswrapper[5002]: I0930 12:32:54.046105 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hpnvt\" (UniqueName: \"kubernetes.io/projected/1c0ed680-87c2-438b-aca7-b9fa1c19d414-kube-api-access-hpnvt\") on node \"crc\" DevicePath \"\""
Sep 30 12:32:54 crc kubenswrapper[5002]: I0930 12:32:54.046593 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7d0e2e49-da4f-44da-9257-8fc36c6bceec-client-ca\") pod \"controller-manager-54dc747764-rnsdd\" (UID: \"7d0e2e49-da4f-44da-9257-8fc36c6bceec\") " pod="openshift-controller-manager/controller-manager-54dc747764-rnsdd"
Sep 30 12:32:54 crc kubenswrapper[5002]: I0930 12:32:54.046829 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7d0e2e49-da4f-44da-9257-8fc36c6bceec-proxy-ca-bundles\") pod \"controller-manager-54dc747764-rnsdd\" (UID: \"7d0e2e49-da4f-44da-9257-8fc36c6bceec\") " pod="openshift-controller-manager/controller-manager-54dc747764-rnsdd"
Sep 30 12:32:54 crc kubenswrapper[5002]: I0930 12:32:54.047448 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7d0e2e49-da4f-44da-9257-8fc36c6bceec-config\") pod \"controller-manager-54dc747764-rnsdd\" (UID: \"7d0e2e49-da4f-44da-9257-8fc36c6bceec\") " pod="openshift-controller-manager/controller-manager-54dc747764-rnsdd"
Sep 30 12:32:54 crc kubenswrapper[5002]: I0930 12:32:54.050301 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7d0e2e49-da4f-44da-9257-8fc36c6bceec-serving-cert\") pod \"controller-manager-54dc747764-rnsdd\" (UID: \"7d0e2e49-da4f-44da-9257-8fc36c6bceec\") " pod="openshift-controller-manager/controller-manager-54dc747764-rnsdd"
Sep 30 12:32:54 crc kubenswrapper[5002]: I0930 12:32:54.070383 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9wbph\" (UniqueName: \"kubernetes.io/projected/7d0e2e49-da4f-44da-9257-8fc36c6bceec-kube-api-access-9wbph\") pod \"controller-manager-54dc747764-rnsdd\" (UID: \"7d0e2e49-da4f-44da-9257-8fc36c6bceec\") " pod="openshift-controller-manager/controller-manager-54dc747764-rnsdd"
Sep 30 12:32:54 crc kubenswrapper[5002]: I0930 12:32:54.166902 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-54dc747764-rnsdd"
Sep 30 12:32:54 crc kubenswrapper[5002]: I0930 12:32:54.342671 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-p582f"]
Sep 30 12:32:54 crc kubenswrapper[5002]: I0930 12:32:54.352600 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-p582f"]
Sep 30 12:32:54 crc kubenswrapper[5002]: I0930 12:32:54.355007 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-57t2m"]
Sep 30 12:32:54 crc kubenswrapper[5002]: I0930 12:32:54.365111 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-57t2m"]
Sep 30 12:32:54 crc kubenswrapper[5002]: I0930 12:32:54.568284 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-54dc747764-rnsdd"]
Sep 30 12:32:54 crc kubenswrapper[5002]: I0930 12:32:54.589565 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-59bcc6685d-mgbm5"]
Sep 30 12:32:54 crc kubenswrapper[5002]: E0930 12:32:54.589778 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1c0ed680-87c2-438b-aca7-b9fa1c19d414" containerName="route-controller-manager"
Sep 30 12:32:54 crc kubenswrapper[5002]: I0930 12:32:54.589789 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="1c0ed680-87c2-438b-aca7-b9fa1c19d414" containerName="route-controller-manager"
Sep 30 12:32:54 crc kubenswrapper[5002]: I0930 12:32:54.589875 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="1c0ed680-87c2-438b-aca7-b9fa1c19d414" containerName="route-controller-manager"
Sep 30 12:32:54 crc kubenswrapper[5002]: I0930 12:32:54.590226 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-59bcc6685d-mgbm5"
Sep 30 12:32:54 crc kubenswrapper[5002]: I0930 12:32:54.591974 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2"
Sep 30 12:32:54 crc kubenswrapper[5002]: I0930 12:32:54.591975 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt"
Sep 30 12:32:54 crc kubenswrapper[5002]: I0930 12:32:54.594296 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert"
Sep 30 12:32:54 crc kubenswrapper[5002]: I0930 12:32:54.594514 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config"
Sep 30 12:32:54 crc kubenswrapper[5002]: I0930 12:32:54.594651 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca"
Sep 30 12:32:54 crc kubenswrapper[5002]: I0930 12:32:54.594764 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt"
Sep 30 12:32:54 crc kubenswrapper[5002]: I0930 12:32:54.601103 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-59bcc6685d-mgbm5"]
Sep 30 12:32:54 crc kubenswrapper[5002]: I0930 12:32:54.649969 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-54dc747764-rnsdd"]
Sep 30 12:32:54 crc kubenswrapper[5002]: I0930 12:32:54.684631 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="11cba069-c2a4-4b6c-8e3e-38c76d27f20a" path="/var/lib/kubelet/pods/11cba069-c2a4-4b6c-8e3e-38c76d27f20a/volumes"
Sep 30 12:32:54 crc kubenswrapper[5002]: I0930 12:32:54.685260 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1c0ed680-87c2-438b-aca7-b9fa1c19d414" path="/var/lib/kubelet/pods/1c0ed680-87c2-438b-aca7-b9fa1c19d414/volumes"
Sep 30 12:32:54 crc kubenswrapper[5002]: I0930 12:32:54.757301 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dwlfm\" (UniqueName: \"kubernetes.io/projected/efc3a084-db4f-4222-9ee0-b4292a263bd3-kube-api-access-dwlfm\") pod \"route-controller-manager-59bcc6685d-mgbm5\" (UID: \"efc3a084-db4f-4222-9ee0-b4292a263bd3\") " pod="openshift-route-controller-manager/route-controller-manager-59bcc6685d-mgbm5"
Sep 30 12:32:54 crc kubenswrapper[5002]: I0930 12:32:54.757351 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/efc3a084-db4f-4222-9ee0-b4292a263bd3-client-ca\") pod \"route-controller-manager-59bcc6685d-mgbm5\" (UID: \"efc3a084-db4f-4222-9ee0-b4292a263bd3\") " pod="openshift-route-controller-manager/route-controller-manager-59bcc6685d-mgbm5"
Sep 30 12:32:54 crc kubenswrapper[5002]: I0930 12:32:54.757398 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/efc3a084-db4f-4222-9ee0-b4292a263bd3-serving-cert\") pod \"route-controller-manager-59bcc6685d-mgbm5\" (UID: \"efc3a084-db4f-4222-9ee0-b4292a263bd3\") " pod="openshift-route-controller-manager/route-controller-manager-59bcc6685d-mgbm5"
Sep 30 12:32:54 crc kubenswrapper[5002]: I0930 12:32:54.757428 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/efc3a084-db4f-4222-9ee0-b4292a263bd3-config\") pod \"route-controller-manager-59bcc6685d-mgbm5\" (UID: \"efc3a084-db4f-4222-9ee0-b4292a263bd3\") " pod="openshift-route-controller-manager/route-controller-manager-59bcc6685d-mgbm5"
Sep 30 12:32:54 crc kubenswrapper[5002]: I0930 12:32:54.858638 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dwlfm\" (UniqueName: \"kubernetes.io/projected/efc3a084-db4f-4222-9ee0-b4292a263bd3-kube-api-access-dwlfm\") pod \"route-controller-manager-59bcc6685d-mgbm5\" (UID: \"efc3a084-db4f-4222-9ee0-b4292a263bd3\") " pod="openshift-route-controller-manager/route-controller-manager-59bcc6685d-mgbm5"
Sep 30 12:32:54 crc kubenswrapper[5002]: I0930 12:32:54.858718 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/efc3a084-db4f-4222-9ee0-b4292a263bd3-client-ca\") pod \"route-controller-manager-59bcc6685d-mgbm5\" (UID: \"efc3a084-db4f-4222-9ee0-b4292a263bd3\") " pod="openshift-route-controller-manager/route-controller-manager-59bcc6685d-mgbm5"
Sep 30 12:32:54 crc kubenswrapper[5002]: I0930 12:32:54.858843 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/efc3a084-db4f-4222-9ee0-b4292a263bd3-serving-cert\") pod \"route-controller-manager-59bcc6685d-mgbm5\" (UID: \"efc3a084-db4f-4222-9ee0-b4292a263bd3\") " pod="openshift-route-controller-manager/route-controller-manager-59bcc6685d-mgbm5"
Sep 30 12:32:54 crc kubenswrapper[5002]: I0930 12:32:54.858889 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/efc3a084-db4f-4222-9ee0-b4292a263bd3-config\") pod \"route-controller-manager-59bcc6685d-mgbm5\" (UID: \"efc3a084-db4f-4222-9ee0-b4292a263bd3\") " pod="openshift-route-controller-manager/route-controller-manager-59bcc6685d-mgbm5"
Sep 30 12:32:54 crc kubenswrapper[5002]: I0930 12:32:54.859677 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/efc3a084-db4f-4222-9ee0-b4292a263bd3-client-ca\") pod \"route-controller-manager-59bcc6685d-mgbm5\" (UID: \"efc3a084-db4f-4222-9ee0-b4292a263bd3\") " pod="openshift-route-controller-manager/route-controller-manager-59bcc6685d-mgbm5"
Sep 30 12:32:54 crc kubenswrapper[5002]: I0930 12:32:54.860412 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/efc3a084-db4f-4222-9ee0-b4292a263bd3-config\") pod \"route-controller-manager-59bcc6685d-mgbm5\" (UID: \"efc3a084-db4f-4222-9ee0-b4292a263bd3\") " pod="openshift-route-controller-manager/route-controller-manager-59bcc6685d-mgbm5"
Sep 30 12:32:54 crc kubenswrapper[5002]: I0930 12:32:54.865962 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/efc3a084-db4f-4222-9ee0-b4292a263bd3-serving-cert\") pod \"route-controller-manager-59bcc6685d-mgbm5\" (UID: \"efc3a084-db4f-4222-9ee0-b4292a263bd3\") " pod="openshift-route-controller-manager/route-controller-manager-59bcc6685d-mgbm5"
Sep 30 12:32:54 crc kubenswrapper[5002]: I0930 12:32:54.875639 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dwlfm\" (UniqueName: \"kubernetes.io/projected/efc3a084-db4f-4222-9ee0-b4292a263bd3-kube-api-access-dwlfm\") pod \"route-controller-manager-59bcc6685d-mgbm5\" (UID: \"efc3a084-db4f-4222-9ee0-b4292a263bd3\") " pod="openshift-route-controller-manager/route-controller-manager-59bcc6685d-mgbm5"
Sep 30 12:32:54 crc kubenswrapper[5002]: I0930 12:32:54.906419 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-59bcc6685d-mgbm5"
Sep 30 12:32:54 crc kubenswrapper[5002]: I0930 12:32:54.966234 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-54dc747764-rnsdd" event={"ID":"7d0e2e49-da4f-44da-9257-8fc36c6bceec","Type":"ContainerStarted","Data":"8bfe028615342934aceecdcb3180a147218af1e0796aee731bf4ded5bd91c732"}
Sep 30 12:32:55 crc kubenswrapper[5002]: I0930 12:32:55.161947 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-59bcc6685d-mgbm5"]
Sep 30 12:32:55 crc kubenswrapper[5002]: W0930 12:32:55.168921 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podefc3a084_db4f_4222_9ee0_b4292a263bd3.slice/crio-72328ffb708b9163baedb823d4ac60636c66feb541a8bf2ee82007dbeb7329ab WatchSource:0}: Error finding container 72328ffb708b9163baedb823d4ac60636c66feb541a8bf2ee82007dbeb7329ab: Status 404 returned error can't find the container with id 72328ffb708b9163baedb823d4ac60636c66feb541a8bf2ee82007dbeb7329ab
Sep 30 12:32:55 crc kubenswrapper[5002]: I0930 12:32:55.975383 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-59bcc6685d-mgbm5" event={"ID":"efc3a084-db4f-4222-9ee0-b4292a263bd3","Type":"ContainerStarted","Data":"2bb4d57305e5bc5507d4950646a6d4a74c7a8fb3e54128775a89ec9b75fb2db9"}
Sep 30 12:32:55 crc kubenswrapper[5002]: I0930 12:32:55.975687 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-59bcc6685d-mgbm5" event={"ID":"efc3a084-db4f-4222-9ee0-b4292a263bd3","Type":"ContainerStarted","Data":"72328ffb708b9163baedb823d4ac60636c66feb541a8bf2ee82007dbeb7329ab"}
Sep 30 12:32:55 crc kubenswrapper[5002]: I0930 12:32:55.975703 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-59bcc6685d-mgbm5"
Sep 30 12:32:55 crc kubenswrapper[5002]: I0930 12:32:55.976935 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-54dc747764-rnsdd" event={"ID":"7d0e2e49-da4f-44da-9257-8fc36c6bceec","Type":"ContainerStarted","Data":"bbaa2d000bde10939833690f48229e316a24ce27164ce56b8b1e60886318acdf"}
Sep 30 12:32:55 crc kubenswrapper[5002]: I0930 12:32:55.977029 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-54dc747764-rnsdd" podUID="7d0e2e49-da4f-44da-9257-8fc36c6bceec" containerName="controller-manager" containerID="cri-o://bbaa2d000bde10939833690f48229e316a24ce27164ce56b8b1e60886318acdf" gracePeriod=30
Sep 30 12:32:55 crc kubenswrapper[5002]: I0930 12:32:55.977163 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-54dc747764-rnsdd"
Sep 30 12:32:55 crc kubenswrapper[5002]: I0930 12:32:55.982350 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-54dc747764-rnsdd"
Sep 30 12:32:55 crc kubenswrapper[5002]: I0930 12:32:55.994653 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-59bcc6685d-mgbm5" podStartSLOduration=1.9946370199999999 podStartE2EDuration="1.99463702s" podCreationTimestamp="2025-09-30 12:32:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 12:32:55.992131543 +0000 UTC m=+750.241813699" watchObservedRunningTime="2025-09-30 12:32:55.99463702 +0000 UTC m=+750.244319156"
Sep 30 12:32:56 crc kubenswrapper[5002]: I0930 12:32:56.012129 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-54dc747764-rnsdd" podStartSLOduration=4.012111368 podStartE2EDuration="4.012111368s" podCreationTimestamp="2025-09-30 12:32:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 12:32:56.009517519 +0000 UTC m=+750.259199675" watchObservedRunningTime="2025-09-30 12:32:56.012111368 +0000 UTC m=+750.261793514"
Sep 30 12:32:56 crc kubenswrapper[5002]: I0930 12:32:56.392443 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-54dc747764-rnsdd"
Sep 30 12:32:56 crc kubenswrapper[5002]: I0930 12:32:56.399894 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-59bcc6685d-mgbm5"
Sep 30 12:32:56 crc kubenswrapper[5002]: I0930 12:32:56.428160 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-b64c8d469-s4j79"]
Sep 30 12:32:56 crc kubenswrapper[5002]: E0930 12:32:56.428370 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7d0e2e49-da4f-44da-9257-8fc36c6bceec" containerName="controller-manager"
Sep 30 12:32:56 crc kubenswrapper[5002]: I0930 12:32:56.428382 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="7d0e2e49-da4f-44da-9257-8fc36c6bceec" containerName="controller-manager"
Sep 30 12:32:56 crc kubenswrapper[5002]: I0930 12:32:56.428487 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="7d0e2e49-da4f-44da-9257-8fc36c6bceec" containerName="controller-manager"
Sep 30 12:32:56 crc kubenswrapper[5002]: I0930 12:32:56.428822 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-b64c8d469-s4j79"
Sep 30 12:32:56 crc kubenswrapper[5002]: I0930 12:32:56.453376 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-b64c8d469-s4j79"]
Sep 30 12:32:56 crc kubenswrapper[5002]: I0930 12:32:56.476831 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7d0e2e49-da4f-44da-9257-8fc36c6bceec-serving-cert\") pod \"7d0e2e49-da4f-44da-9257-8fc36c6bceec\" (UID: \"7d0e2e49-da4f-44da-9257-8fc36c6bceec\") "
Sep 30 12:32:56 crc kubenswrapper[5002]: I0930 12:32:56.476892 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7d0e2e49-da4f-44da-9257-8fc36c6bceec-proxy-ca-bundles\") pod \"7d0e2e49-da4f-44da-9257-8fc36c6bceec\" (UID: \"7d0e2e49-da4f-44da-9257-8fc36c6bceec\") "
Sep 30 12:32:56 crc kubenswrapper[5002]: I0930 12:32:56.476980 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7d0e2e49-da4f-44da-9257-8fc36c6bceec-config\") pod \"7d0e2e49-da4f-44da-9257-8fc36c6bceec\" (UID: \"7d0e2e49-da4f-44da-9257-8fc36c6bceec\") "
Sep 30 12:32:56 crc kubenswrapper[5002]: I0930 12:32:56.477010 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7d0e2e49-da4f-44da-9257-8fc36c6bceec-client-ca\") pod \"7d0e2e49-da4f-44da-9257-8fc36c6bceec\" (UID: \"7d0e2e49-da4f-44da-9257-8fc36c6bceec\") "
Sep 30 12:32:56 crc kubenswrapper[5002]: I0930 12:32:56.477031 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9wbph\" (UniqueName: \"kubernetes.io/projected/7d0e2e49-da4f-44da-9257-8fc36c6bceec-kube-api-access-9wbph\") pod \"7d0e2e49-da4f-44da-9257-8fc36c6bceec\" (UID: \"7d0e2e49-da4f-44da-9257-8fc36c6bceec\") "
Sep 30 12:32:56 crc kubenswrapper[5002]: I0930 12:32:56.478489 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7d0e2e49-da4f-44da-9257-8fc36c6bceec-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "7d0e2e49-da4f-44da-9257-8fc36c6bceec" (UID: "7d0e2e49-da4f-44da-9257-8fc36c6bceec"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 30 12:32:56 crc kubenswrapper[5002]: I0930 12:32:56.478520 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7d0e2e49-da4f-44da-9257-8fc36c6bceec-config" (OuterVolumeSpecName: "config") pod "7d0e2e49-da4f-44da-9257-8fc36c6bceec" (UID: "7d0e2e49-da4f-44da-9257-8fc36c6bceec"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 30 12:32:56 crc kubenswrapper[5002]: I0930 12:32:56.478758 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7d0e2e49-da4f-44da-9257-8fc36c6bceec-client-ca" (OuterVolumeSpecName: "client-ca") pod "7d0e2e49-da4f-44da-9257-8fc36c6bceec" (UID: "7d0e2e49-da4f-44da-9257-8fc36c6bceec"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 30 12:32:56 crc kubenswrapper[5002]: I0930 12:32:56.482357 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7d0e2e49-da4f-44da-9257-8fc36c6bceec-kube-api-access-9wbph" (OuterVolumeSpecName: "kube-api-access-9wbph") pod "7d0e2e49-da4f-44da-9257-8fc36c6bceec" (UID: "7d0e2e49-da4f-44da-9257-8fc36c6bceec"). InnerVolumeSpecName "kube-api-access-9wbph". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 12:32:56 crc kubenswrapper[5002]: I0930 12:32:56.482570 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7d0e2e49-da4f-44da-9257-8fc36c6bceec-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7d0e2e49-da4f-44da-9257-8fc36c6bceec" (UID: "7d0e2e49-da4f-44da-9257-8fc36c6bceec"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 12:32:56 crc kubenswrapper[5002]: I0930 12:32:56.578392 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8wbrr\" (UniqueName: \"kubernetes.io/projected/945c4609-4de0-4c15-8613-bfd4a11c454c-kube-api-access-8wbrr\") pod \"controller-manager-b64c8d469-s4j79\" (UID: \"945c4609-4de0-4c15-8613-bfd4a11c454c\") " pod="openshift-controller-manager/controller-manager-b64c8d469-s4j79"
Sep 30 12:32:56 crc kubenswrapper[5002]: I0930 12:32:56.578450 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/945c4609-4de0-4c15-8613-bfd4a11c454c-proxy-ca-bundles\") pod \"controller-manager-b64c8d469-s4j79\" (UID: \"945c4609-4de0-4c15-8613-bfd4a11c454c\") " pod="openshift-controller-manager/controller-manager-b64c8d469-s4j79"
Sep 30 12:32:56 crc kubenswrapper[5002]: I0930 12:32:56.578656 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/945c4609-4de0-4c15-8613-bfd4a11c454c-serving-cert\") pod \"controller-manager-b64c8d469-s4j79\" (UID: \"945c4609-4de0-4c15-8613-bfd4a11c454c\") " pod="openshift-controller-manager/controller-manager-b64c8d469-s4j79"
Sep 30 12:32:56 crc kubenswrapper[5002]: I0930 12:32:56.578712 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/945c4609-4de0-4c15-8613-bfd4a11c454c-client-ca\") pod \"controller-manager-b64c8d469-s4j79\" (UID: \"945c4609-4de0-4c15-8613-bfd4a11c454c\") " pod="openshift-controller-manager/controller-manager-b64c8d469-s4j79"
Sep 30 12:32:56 crc kubenswrapper[5002]: I0930 12:32:56.578732 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/945c4609-4de0-4c15-8613-bfd4a11c454c-config\") pod \"controller-manager-b64c8d469-s4j79\" (UID: \"945c4609-4de0-4c15-8613-bfd4a11c454c\") " pod="openshift-controller-manager/controller-manager-b64c8d469-s4j79"
Sep 30 12:32:56 crc kubenswrapper[5002]: I0930 12:32:56.578825 5002 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7d0e2e49-da4f-44da-9257-8fc36c6bceec-client-ca\") on node \"crc\" DevicePath \"\""
Sep 30 12:32:56 crc kubenswrapper[5002]: I0930 12:32:56.578838 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9wbph\" (UniqueName: \"kubernetes.io/projected/7d0e2e49-da4f-44da-9257-8fc36c6bceec-kube-api-access-9wbph\") on node \"crc\" DevicePath \"\""
Sep 30 12:32:56 crc kubenswrapper[5002]: I0930 12:32:56.578848 5002 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7d0e2e49-da4f-44da-9257-8fc36c6bceec-serving-cert\") on node \"crc\" DevicePath \"\""
Sep 30 12:32:56 crc kubenswrapper[5002]: I0930 12:32:56.578857 5002 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7d0e2e49-da4f-44da-9257-8fc36c6bceec-proxy-ca-bundles\") on node \"crc\" DevicePath \"\""
Sep 30 12:32:56 crc kubenswrapper[5002]: I0930 12:32:56.578866 5002 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7d0e2e49-da4f-44da-9257-8fc36c6bceec-config\") on node \"crc\" DevicePath \"\""
Sep 30 12:32:56 crc kubenswrapper[5002]: I0930 12:32:56.683297 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/945c4609-4de0-4c15-8613-bfd4a11c454c-serving-cert\") pod \"controller-manager-b64c8d469-s4j79\" (UID: \"945c4609-4de0-4c15-8613-bfd4a11c454c\") " pod="openshift-controller-manager/controller-manager-b64c8d469-s4j79"
Sep 30 12:32:56 crc kubenswrapper[5002]: I0930 12:32:56.683403 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/945c4609-4de0-4c15-8613-bfd4a11c454c-client-ca\") pod \"controller-manager-b64c8d469-s4j79\" (UID: \"945c4609-4de0-4c15-8613-bfd4a11c454c\") " pod="openshift-controller-manager/controller-manager-b64c8d469-s4j79"
Sep 30 12:32:56 crc kubenswrapper[5002]: I0930 12:32:56.683454 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/945c4609-4de0-4c15-8613-bfd4a11c454c-config\") pod \"controller-manager-b64c8d469-s4j79\" (UID: \"945c4609-4de0-4c15-8613-bfd4a11c454c\") " pod="openshift-controller-manager/controller-manager-b64c8d469-s4j79"
Sep 30 12:32:56 crc kubenswrapper[5002]: I0930 12:32:56.683561 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8wbrr\" (UniqueName: \"kubernetes.io/projected/945c4609-4de0-4c15-8613-bfd4a11c454c-kube-api-access-8wbrr\") pod \"controller-manager-b64c8d469-s4j79\" (UID: \"945c4609-4de0-4c15-8613-bfd4a11c454c\") " pod="openshift-controller-manager/controller-manager-b64c8d469-s4j79"
Sep 30 12:32:56 crc kubenswrapper[5002]: I0930 12:32:56.683621 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/945c4609-4de0-4c15-8613-bfd4a11c454c-proxy-ca-bundles\") pod \"controller-manager-b64c8d469-s4j79\" (UID: \"945c4609-4de0-4c15-8613-bfd4a11c454c\") " pod="openshift-controller-manager/controller-manager-b64c8d469-s4j79"
Sep 30 12:32:56 crc kubenswrapper[5002]: I0930 12:32:56.688804 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/945c4609-4de0-4c15-8613-bfd4a11c454c-proxy-ca-bundles\") pod \"controller-manager-b64c8d469-s4j79\" (UID: \"945c4609-4de0-4c15-8613-bfd4a11c454c\") " pod="openshift-controller-manager/controller-manager-b64c8d469-s4j79"
Sep 30 12:32:56 crc kubenswrapper[5002]: I0930 12:32:56.692727 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/945c4609-4de0-4c15-8613-bfd4a11c454c-client-ca\") pod \"controller-manager-b64c8d469-s4j79\" (UID: \"945c4609-4de0-4c15-8613-bfd4a11c454c\") " pod="openshift-controller-manager/controller-manager-b64c8d469-s4j79"
Sep 30 12:32:56 crc kubenswrapper[5002]: I0930 12:32:56.693565 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/945c4609-4de0-4c15-8613-bfd4a11c454c-config\") pod \"controller-manager-b64c8d469-s4j79\" (UID: \"945c4609-4de0-4c15-8613-bfd4a11c454c\") " pod="openshift-controller-manager/controller-manager-b64c8d469-s4j79"
Sep 30 12:32:56 crc kubenswrapper[5002]: I0930 12:32:56.695253 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/945c4609-4de0-4c15-8613-bfd4a11c454c-serving-cert\") pod \"controller-manager-b64c8d469-s4j79\" (UID: \"945c4609-4de0-4c15-8613-bfd4a11c454c\") " pod="openshift-controller-manager/controller-manager-b64c8d469-s4j79"
Sep 30 12:32:56 crc kubenswrapper[5002]: I0930 12:32:56.709119 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8wbrr\" (UniqueName: \"kubernetes.io/projected/945c4609-4de0-4c15-8613-bfd4a11c454c-kube-api-access-8wbrr\") pod \"controller-manager-b64c8d469-s4j79\" (UID: \"945c4609-4de0-4c15-8613-bfd4a11c454c\") " pod="openshift-controller-manager/controller-manager-b64c8d469-s4j79"
Sep 30 12:32:56 crc kubenswrapper[5002]: I0930 12:32:56.745601 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-b64c8d469-s4j79"
Sep 30 12:32:56 crc kubenswrapper[5002]: I0930 12:32:56.983995 5002 generic.go:334] "Generic (PLEG): container finished" podID="7d0e2e49-da4f-44da-9257-8fc36c6bceec" containerID="bbaa2d000bde10939833690f48229e316a24ce27164ce56b8b1e60886318acdf" exitCode=0
Sep 30 12:32:56 crc kubenswrapper[5002]: I0930 12:32:56.984110 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-54dc747764-rnsdd"
Sep 30 12:32:56 crc kubenswrapper[5002]: I0930 12:32:56.984152 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-54dc747764-rnsdd" event={"ID":"7d0e2e49-da4f-44da-9257-8fc36c6bceec","Type":"ContainerDied","Data":"bbaa2d000bde10939833690f48229e316a24ce27164ce56b8b1e60886318acdf"}
Sep 30 12:32:56 crc kubenswrapper[5002]: I0930 12:32:56.984184 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-54dc747764-rnsdd" event={"ID":"7d0e2e49-da4f-44da-9257-8fc36c6bceec","Type":"ContainerDied","Data":"8bfe028615342934aceecdcb3180a147218af1e0796aee731bf4ded5bd91c732"}
Sep 30 12:32:56 crc kubenswrapper[5002]: I0930 12:32:56.984200 5002 scope.go:117] "RemoveContainer" containerID="bbaa2d000bde10939833690f48229e316a24ce27164ce56b8b1e60886318acdf"
Sep 30 12:32:57 crc kubenswrapper[5002]: I0930 12:32:57.007787 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-54dc747764-rnsdd"]
Sep 30 12:32:57 crc kubenswrapper[5002]: I0930 12:32:57.011089 5002 scope.go:117] "RemoveContainer" containerID="bbaa2d000bde10939833690f48229e316a24ce27164ce56b8b1e60886318acdf"
Sep 30 12:32:57 crc kubenswrapper[5002]: E0930 12:32:57.011439 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bbaa2d000bde10939833690f48229e316a24ce27164ce56b8b1e60886318acdf\": container with ID starting with bbaa2d000bde10939833690f48229e316a24ce27164ce56b8b1e60886318acdf not found: ID does not exist" containerID="bbaa2d000bde10939833690f48229e316a24ce27164ce56b8b1e60886318acdf"
Sep 30 12:32:57 crc kubenswrapper[5002]: I0930 12:32:57.011495 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bbaa2d000bde10939833690f48229e316a24ce27164ce56b8b1e60886318acdf"} err="failed to get container status \"bbaa2d000bde10939833690f48229e316a24ce27164ce56b8b1e60886318acdf\": rpc error: code = NotFound desc = could not find container \"bbaa2d000bde10939833690f48229e316a24ce27164ce56b8b1e60886318acdf\": container with ID starting with bbaa2d000bde10939833690f48229e316a24ce27164ce56b8b1e60886318acdf not found: ID does not exist"
Sep 30 12:32:57 crc kubenswrapper[5002]: I0930 12:32:57.012815 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-54dc747764-rnsdd"]
Sep 30 12:32:57 crc kubenswrapper[5002]: I0930 12:32:57.180159 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-b64c8d469-s4j79"]
Sep 30 12:32:57 crc kubenswrapper[5002]: I0930 12:32:57.993645 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-b64c8d469-s4j79" event={"ID":"945c4609-4de0-4c15-8613-bfd4a11c454c","Type":"ContainerStarted","Data":"f573f654be63fe70c45e69ca4c32299d1134caf8a1fcc0973171231ccf580494"}
Sep 30 12:32:57 crc kubenswrapper[5002]: I0930 12:32:57.993938 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-b64c8d469-s4j79" event={"ID":"945c4609-4de0-4c15-8613-bfd4a11c454c","Type":"ContainerStarted","Data":"dd12da8cc89c24e2c3c197ca5a3b53fbe440f72a99db464ce5b7af86df7dec94"}
Sep 30 12:32:57 crc kubenswrapper[5002]: I0930 12:32:57.993958 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-b64c8d469-s4j79"
Sep 30 12:32:57 crc kubenswrapper[5002]: I0930 12:32:57.999856 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-b64c8d469-s4j79"
Sep 30 12:32:58 crc kubenswrapper[5002]: I0930 12:32:58.016081 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-b64c8d469-s4j79" podStartSLOduration=4.016056613 podStartE2EDuration="4.016056613s" podCreationTimestamp="2025-09-30 12:32:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 12:32:58.012779336 +0000 UTC m=+752.262461482" watchObservedRunningTime="2025-09-30 12:32:58.016056613 +0000 UTC m=+752.265738769"
Sep 30 12:32:58 crc kubenswrapper[5002]: I0930 12:32:58.685344 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7d0e2e49-da4f-44da-9257-8fc36c6bceec" path="/var/lib/kubelet/pods/7d0e2e49-da4f-44da-9257-8fc36c6bceec/volumes"
Sep 30 12:33:02 crc kubenswrapper[5002]: I0930 12:33:02.098030 5002 patch_prober.go:28] interesting pod/machine-config-daemon-ncbb5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Sep 30 12:33:02 crc kubenswrapper[5002]: I0930 12:33:02.098349 5002 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" podUID="341a55c6-78d3-4fa2-8f47-b56fd41fa1c1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Sep 30 12:33:02 crc kubenswrapper[5002]: I0930 12:33:02.098403 5002 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5"
Sep 30 12:33:02 crc kubenswrapper[5002]: I0930 12:33:02.099124 5002 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"192b85eb900ab298ef92f87ae6539f9cf20c972d71035d71486bd4ab14bc6108"} pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Sep 30 12:33:02 crc kubenswrapper[5002]: I0930 12:33:02.099212 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" podUID="341a55c6-78d3-4fa2-8f47-b56fd41fa1c1" containerName="machine-config-daemon" containerID="cri-o://192b85eb900ab298ef92f87ae6539f9cf20c972d71035d71486bd4ab14bc6108" gracePeriod=600
Sep 30 12:33:03 crc kubenswrapper[5002]: I0930 12:33:03.024501 5002 generic.go:334] "Generic (PLEG): container finished" podID="341a55c6-78d3-4fa2-8f47-b56fd41fa1c1" containerID="192b85eb900ab298ef92f87ae6539f9cf20c972d71035d71486bd4ab14bc6108" exitCode=0
Sep 30 12:33:03 crc kubenswrapper[5002]: I0930 12:33:03.024561 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" event={"ID":"341a55c6-78d3-4fa2-8f47-b56fd41fa1c1","Type":"ContainerDied","Data":"192b85eb900ab298ef92f87ae6539f9cf20c972d71035d71486bd4ab14bc6108"}
Sep 30 12:33:03 crc kubenswrapper[5002]: I0930 12:33:03.025098 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" event={"ID":"341a55c6-78d3-4fa2-8f47-b56fd41fa1c1","Type":"ContainerStarted","Data":"c1160f56b0e9b4bf2e03936ba3d26a4bdb21744142f5cdbd024372eb9b5cd6dd"}
Sep 30 12:33:03 crc kubenswrapper[5002]: I0930 12:33:03.025135 5002 scope.go:117] "RemoveContainer" containerID="2265b6a0e732448a595146d842013f7245c5b783b761a0762decdaca959b8f6c"
Sep 30 12:33:04 crc kubenswrapper[5002]: I0930 12:33:04.109720 5002 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt"
Sep 30 12:33:11 crc kubenswrapper[5002]: I0930 12:33:11.565509 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-controller-manager-576d687654-bw9lz"
Sep 30 12:33:12 crc kubenswrapper[5002]: I0930 12:33:12.250017 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-fxsmq"]
Sep 30 12:33:12 crc kubenswrapper[5002]: I0930 12:33:12.252741 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-fxsmq"
Sep 30 12:33:12 crc kubenswrapper[5002]: I0930 12:33:12.255445 5002 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-certs-secret"
Sep 30 12:33:12 crc kubenswrapper[5002]: I0930 12:33:12.255502 5002 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-daemon-dockercfg-svrwz"
Sep 30 12:33:12 crc kubenswrapper[5002]: I0930 12:33:12.256763 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"frr-startup"
Sep 30 12:33:12 crc kubenswrapper[5002]: I0930 12:33:12.261778 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-webhook-server-5478bdb765-j499w"]
Sep 30 12:33:12 crc kubenswrapper[5002]: I0930 12:33:12.262797 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-5478bdb765-j499w"
Sep 30 12:33:12 crc kubenswrapper[5002]: I0930 12:33:12.264901 5002 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-webhook-server-cert"
Sep 30 12:33:12 crc kubenswrapper[5002]: I0930 12:33:12.277222 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-5478bdb765-j499w"]
Sep 30 12:33:12 crc kubenswrapper[5002]: I0930 12:33:12.350881 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/controller-5d688f5ffc-4hv4v"]
Sep 30 12:33:12 crc kubenswrapper[5002]: I0930 12:33:12.352331 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/controller-5d688f5ffc-4hv4v"
Sep 30 12:33:12 crc kubenswrapper[5002]: I0930 12:33:12.354058 5002 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-certs-secret"
Sep 30 12:33:12 crc kubenswrapper[5002]: I0930 12:33:12.354795 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/speaker-tq2q9"]
Sep 30 12:33:12 crc kubenswrapper[5002]: I0930 12:33:12.355977 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/speaker-tq2q9"
Sep 30 12:33:12 crc kubenswrapper[5002]: I0930 12:33:12.361051 5002 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-certs-secret"
Sep 30 12:33:12 crc kubenswrapper[5002]: I0930 12:33:12.361127 5002 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-dockercfg-ll7nd"
Sep 30 12:33:12 crc kubenswrapper[5002]: I0930 12:33:12.361855 5002 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-memberlist"
Sep 30 12:33:12 crc kubenswrapper[5002]: I0930 12:33:12.363085 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"metallb-excludel2"
Sep 30 12:33:12 crc kubenswrapper[5002]: I0930 12:33:12.368537 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-5d688f5ffc-4hv4v"]
Sep 30 12:33:12 crc kubenswrapper[5002]: I0930 12:33:12.383153 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/66fde16c-f197-4399-bef6-7ea1d7d41611-metrics\") pod \"frr-k8s-fxsmq\" (UID: \"66fde16c-f197-4399-bef6-7ea1d7d41611\") " pod="metallb-system/frr-k8s-fxsmq"
Sep 30 12:33:12 crc kubenswrapper[5002]: I0930 12:33:12.383223 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/66fde16c-f197-4399-bef6-7ea1d7d41611-frr-conf\") pod \"frr-k8s-fxsmq\" (UID: \"66fde16c-f197-4399-bef6-7ea1d7d41611\") " pod="metallb-system/frr-k8s-fxsmq"
Sep 30 12:33:12 crc kubenswrapper[5002]: I0930 12:33:12.383260 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/66fde16c-f197-4399-bef6-7ea1d7d41611-metrics-certs\") pod \"frr-k8s-fxsmq\" (UID: \"66fde16c-f197-4399-bef6-7ea1d7d41611\") " pod="metallb-system/frr-k8s-fxsmq"
Sep 30 12:33:12 crc kubenswrapper[5002]: I0930 12:33:12.383282 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-428hl\" (UniqueName: \"kubernetes.io/projected/94a678f1-7504-4246-9ce4-23886b1a3623-kube-api-access-428hl\") pod \"frr-k8s-webhook-server-5478bdb765-j499w\" (UID: \"94a678f1-7504-4246-9ce4-23886b1a3623\") " pod="metallb-system/frr-k8s-webhook-server-5478bdb765-j499w"
Sep 30 12:33:12 crc kubenswrapper[5002]: I0930 12:33:12.383303 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/66fde16c-f197-4399-bef6-7ea1d7d41611-frr-startup\") pod \"frr-k8s-fxsmq\" (UID: \"66fde16c-f197-4399-bef6-7ea1d7d41611\") " pod="metallb-system/frr-k8s-fxsmq"
Sep 30 12:33:12 crc kubenswrapper[5002]: I0930 12:33:12.383320 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/66fde16c-f197-4399-bef6-7ea1d7d41611-reloader\") pod \"frr-k8s-fxsmq\" (UID: \"66fde16c-f197-4399-bef6-7ea1d7d41611\") " pod="metallb-system/frr-k8s-fxsmq"
Sep 30 12:33:12 crc kubenswrapper[5002]: I0930 12:33:12.383364 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/66fde16c-f197-4399-bef6-7ea1d7d41611-frr-sockets\") pod \"frr-k8s-fxsmq\" (UID: \"66fde16c-f197-4399-bef6-7ea1d7d41611\") " pod="metallb-system/frr-k8s-fxsmq"
Sep 30 12:33:12 crc kubenswrapper[5002]: I0930 12:33:12.383386 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wd784\" (UniqueName: \"kubernetes.io/projected/66fde16c-f197-4399-bef6-7ea1d7d41611-kube-api-access-wd784\") pod \"frr-k8s-fxsmq\" (UID: \"66fde16c-f197-4399-bef6-7ea1d7d41611\") " pod="metallb-system/frr-k8s-fxsmq"
Sep 30 12:33:12 crc kubenswrapper[5002]: I0930 12:33:12.383403 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/94a678f1-7504-4246-9ce4-23886b1a3623-cert\") pod \"frr-k8s-webhook-server-5478bdb765-j499w\" (UID: \"94a678f1-7504-4246-9ce4-23886b1a3623\") " pod="metallb-system/frr-k8s-webhook-server-5478bdb765-j499w"
Sep 30 12:33:12 crc kubenswrapper[5002]: I0930 12:33:12.484215 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/bae6d903-934d-4f98-9924-805cc9b20d5c-metallb-excludel2\") pod \"speaker-tq2q9\" (UID: \"bae6d903-934d-4f98-9924-805cc9b20d5c\") " pod="metallb-system/speaker-tq2q9"
Sep 30 12:33:12 crc kubenswrapper[5002]: I0930 12:33:12.484268 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/9381f980-e2fe-4cf6-97ad-93757413f357-metrics-certs\") pod \"controller-5d688f5ffc-4hv4v\" (UID: \"9381f980-e2fe-4cf6-97ad-93757413f357\") " pod="metallb-system/controller-5d688f5ffc-4hv4v"
Sep 30 12:33:12 crc kubenswrapper[5002]: I0930 12:33:12.484348 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/66fde16c-f197-4399-bef6-7ea1d7d41611-metrics-certs\") pod \"frr-k8s-fxsmq\" (UID: \"66fde16c-f197-4399-bef6-7ea1d7d41611\") " pod="metallb-system/frr-k8s-fxsmq"
Sep 30 12:33:12 crc kubenswrapper[5002]: I0930 12:33:12.484435 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-428hl\" (UniqueName: \"kubernetes.io/projected/94a678f1-7504-4246-9ce4-23886b1a3623-kube-api-access-428hl\") pod \"frr-k8s-webhook-server-5478bdb765-j499w\" (UID: \"94a678f1-7504-4246-9ce4-23886b1a3623\") " pod="metallb-system/frr-k8s-webhook-server-5478bdb765-j499w"
Sep 30 12:33:12 crc kubenswrapper[5002]: I0930 12:33:12.484458 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/66fde16c-f197-4399-bef6-7ea1d7d41611-frr-startup\") pod \"frr-k8s-fxsmq\" (UID: \"66fde16c-f197-4399-bef6-7ea1d7d41611\") " pod="metallb-system/frr-k8s-fxsmq"
Sep 30 12:33:12 crc kubenswrapper[5002]: I0930 12:33:12.484496 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/66fde16c-f197-4399-bef6-7ea1d7d41611-reloader\") pod \"frr-k8s-fxsmq\" (UID: \"66fde16c-f197-4399-bef6-7ea1d7d41611\") " pod="metallb-system/frr-k8s-fxsmq"
Sep 30 12:33:12 crc kubenswrapper[5002]: I0930 12:33:12.484536 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/bae6d903-934d-4f98-9924-805cc9b20d5c-metrics-certs\") pod \"speaker-tq2q9\" (UID: \"bae6d903-934d-4f98-9924-805cc9b20d5c\") " pod="metallb-system/speaker-tq2q9"
Sep 30 12:33:12 crc kubenswrapper[5002]: I0930 12:33:12.484567 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/66fde16c-f197-4399-bef6-7ea1d7d41611-frr-sockets\") pod \"frr-k8s-fxsmq\" (UID: \"66fde16c-f197-4399-bef6-7ea1d7d41611\") " pod="metallb-system/frr-k8s-fxsmq"
Sep 30 12:33:12 crc kubenswrapper[5002]: I0930 12:33:12.484603 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wd784\" (UniqueName: \"kubernetes.io/projected/66fde16c-f197-4399-bef6-7ea1d7d41611-kube-api-access-wd784\") pod \"frr-k8s-fxsmq\" (UID: \"66fde16c-f197-4399-bef6-7ea1d7d41611\") " pod="metallb-system/frr-k8s-fxsmq"
Sep 30 12:33:12 crc kubenswrapper[5002]: I0930 12:33:12.484630 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/94a678f1-7504-4246-9ce4-23886b1a3623-cert\") pod \"frr-k8s-webhook-server-5478bdb765-j499w\" (UID: \"94a678f1-7504-4246-9ce4-23886b1a3623\") " pod="metallb-system/frr-k8s-webhook-server-5478bdb765-j499w"
Sep 30 12:33:12 crc kubenswrapper[5002]: I0930 12:33:12.484672 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-96x2b\" (UniqueName: \"kubernetes.io/projected/bae6d903-934d-4f98-9924-805cc9b20d5c-kube-api-access-96x2b\") pod \"speaker-tq2q9\" (UID: \"bae6d903-934d-4f98-9924-805cc9b20d5c\") " pod="metallb-system/speaker-tq2q9"
Sep 30 12:33:12 crc kubenswrapper[5002]: I0930 12:33:12.484701 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/66fde16c-f197-4399-bef6-7ea1d7d41611-metrics\") pod \"frr-k8s-fxsmq\" (UID: \"66fde16c-f197-4399-bef6-7ea1d7d41611\") " pod="metallb-system/frr-k8s-fxsmq"
Sep 30 12:33:12 crc kubenswrapper[5002]: I0930 12:33:12.484776 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/bae6d903-934d-4f98-9924-805cc9b20d5c-memberlist\") pod \"speaker-tq2q9\" (UID: \"bae6d903-934d-4f98-9924-805cc9b20d5c\") " pod="metallb-system/speaker-tq2q9"
Sep 30 12:33:12 crc kubenswrapper[5002]: I0930 12:33:12.484802 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/9381f980-e2fe-4cf6-97ad-93757413f357-cert\") pod \"controller-5d688f5ffc-4hv4v\" (UID: \"9381f980-e2fe-4cf6-97ad-93757413f357\") " pod="metallb-system/controller-5d688f5ffc-4hv4v"
Sep 30 12:33:12 crc kubenswrapper[5002]: I0930 12:33:12.484827 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/66fde16c-f197-4399-bef6-7ea1d7d41611-frr-conf\") pod \"frr-k8s-fxsmq\" (UID: \"66fde16c-f197-4399-bef6-7ea1d7d41611\") " pod="metallb-system/frr-k8s-fxsmq"
Sep 30 12:33:12 crc kubenswrapper[5002]: I0930 12:33:12.484859 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f9bwl\" (UniqueName: \"kubernetes.io/projected/9381f980-e2fe-4cf6-97ad-93757413f357-kube-api-access-f9bwl\") pod \"controller-5d688f5ffc-4hv4v\" (UID: \"9381f980-e2fe-4cf6-97ad-93757413f357\") " pod="metallb-system/controller-5d688f5ffc-4hv4v"
Sep 30 12:33:12 crc kubenswrapper[5002]: I0930 12:33:12.485761 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/66fde16c-f197-4399-bef6-7ea1d7d41611-metrics\") pod \"frr-k8s-fxsmq\" (UID: \"66fde16c-f197-4399-bef6-7ea1d7d41611\") " pod="metallb-system/frr-k8s-fxsmq"
Sep 30 12:33:12 crc kubenswrapper[5002]: I0930 12:33:12.486120 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/66fde16c-f197-4399-bef6-7ea1d7d41611-frr-startup\") pod \"frr-k8s-fxsmq\" (UID: \"66fde16c-f197-4399-bef6-7ea1d7d41611\") " pod="metallb-system/frr-k8s-fxsmq"
Sep 30 12:33:12 crc kubenswrapper[5002]: I0930 12:33:12.490128 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/66fde16c-f197-4399-bef6-7ea1d7d41611-metrics-certs\") pod \"frr-k8s-fxsmq\" (UID: \"66fde16c-f197-4399-bef6-7ea1d7d41611\") " pod="metallb-system/frr-k8s-fxsmq"
Sep 30 12:33:12 crc kubenswrapper[5002]: I0930 12:33:12.490841 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/94a678f1-7504-4246-9ce4-23886b1a3623-cert\") pod \"frr-k8s-webhook-server-5478bdb765-j499w\" (UID: \"94a678f1-7504-4246-9ce4-23886b1a3623\") " pod="metallb-system/frr-k8s-webhook-server-5478bdb765-j499w"
Sep 30 12:33:12 crc kubenswrapper[5002]: I0930 12:33:12.493712 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/66fde16c-f197-4399-bef6-7ea1d7d41611-frr-sockets\") pod \"frr-k8s-fxsmq\" (UID: \"66fde16c-f197-4399-bef6-7ea1d7d41611\") " pod="metallb-system/frr-k8s-fxsmq"
Sep 30 12:33:12 crc kubenswrapper[5002]: I0930 12:33:12.493738 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/66fde16c-f197-4399-bef6-7ea1d7d41611-frr-conf\") pod \"frr-k8s-fxsmq\" (UID: \"66fde16c-f197-4399-bef6-7ea1d7d41611\") " pod="metallb-system/frr-k8s-fxsmq"
Sep 30 12:33:12 crc kubenswrapper[5002]: I0930 12:33:12.493766 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/66fde16c-f197-4399-bef6-7ea1d7d41611-reloader\") pod \"frr-k8s-fxsmq\" (UID: \"66fde16c-f197-4399-bef6-7ea1d7d41611\") " pod="metallb-system/frr-k8s-fxsmq"
Sep 30 12:33:12 crc kubenswrapper[5002]: I0930 12:33:12.501381 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-428hl\" (UniqueName: \"kubernetes.io/projected/94a678f1-7504-4246-9ce4-23886b1a3623-kube-api-access-428hl\") pod \"frr-k8s-webhook-server-5478bdb765-j499w\" (UID: \"94a678f1-7504-4246-9ce4-23886b1a3623\") " pod="metallb-system/frr-k8s-webhook-server-5478bdb765-j499w"
Sep 30 12:33:12 crc kubenswrapper[5002]: I0930 12:33:12.510637 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wd784\" (UniqueName: \"kubernetes.io/projected/66fde16c-f197-4399-bef6-7ea1d7d41611-kube-api-access-wd784\") pod \"frr-k8s-fxsmq\" (UID: \"66fde16c-f197-4399-bef6-7ea1d7d41611\") " pod="metallb-system/frr-k8s-fxsmq"
Sep 30 12:33:12 crc kubenswrapper[5002]: I0930 12:33:12.574810 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-fxsmq"
Sep 30 12:33:12 crc kubenswrapper[5002]: I0930 12:33:12.586345 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/bae6d903-934d-4f98-9924-805cc9b20d5c-metrics-certs\") pod \"speaker-tq2q9\" (UID: \"bae6d903-934d-4f98-9924-805cc9b20d5c\") " pod="metallb-system/speaker-tq2q9"
Sep 30 12:33:12 crc kubenswrapper[5002]: I0930 12:33:12.586411 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-96x2b\" (UniqueName: \"kubernetes.io/projected/bae6d903-934d-4f98-9924-805cc9b20d5c-kube-api-access-96x2b\") pod \"speaker-tq2q9\" (UID: \"bae6d903-934d-4f98-9924-805cc9b20d5c\") " pod="metallb-system/speaker-tq2q9"
Sep 30 12:33:12 crc kubenswrapper[5002]: I0930 12:33:12.586465 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/bae6d903-934d-4f98-9924-805cc9b20d5c-memberlist\") pod \"speaker-tq2q9\" (UID: \"bae6d903-934d-4f98-9924-805cc9b20d5c\") " pod="metallb-system/speaker-tq2q9"
Sep 30 12:33:12 crc kubenswrapper[5002]: I0930 12:33:12.586531 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/9381f980-e2fe-4cf6-97ad-93757413f357-cert\") pod \"controller-5d688f5ffc-4hv4v\" (UID: \"9381f980-e2fe-4cf6-97ad-93757413f357\") " pod="metallb-system/controller-5d688f5ffc-4hv4v"
Sep 30 12:33:12 crc kubenswrapper[5002]: I0930 12:33:12.586560 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f9bwl\" (UniqueName: \"kubernetes.io/projected/9381f980-e2fe-4cf6-97ad-93757413f357-kube-api-access-f9bwl\") pod \"controller-5d688f5ffc-4hv4v\" (UID: \"9381f980-e2fe-4cf6-97ad-93757413f357\") " pod="metallb-system/controller-5d688f5ffc-4hv4v"
Sep 30 12:33:12 crc kubenswrapper[5002]: I0930 12:33:12.586596 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/bae6d903-934d-4f98-9924-805cc9b20d5c-metallb-excludel2\") pod \"speaker-tq2q9\" (UID: \"bae6d903-934d-4f98-9924-805cc9b20d5c\") " pod="metallb-system/speaker-tq2q9"
Sep 30 12:33:12 crc kubenswrapper[5002]: I0930 12:33:12.586622 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/9381f980-e2fe-4cf6-97ad-93757413f357-metrics-certs\") pod \"controller-5d688f5ffc-4hv4v\" (UID: \"9381f980-e2fe-4cf6-97ad-93757413f357\") " pod="metallb-system/controller-5d688f5ffc-4hv4v"
Sep 30 12:33:12 crc kubenswrapper[5002]: E0930 12:33:12.587007 5002 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found
Sep 30 12:33:12 crc kubenswrapper[5002]: E0930 12:33:12.587068 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/bae6d903-934d-4f98-9924-805cc9b20d5c-memberlist podName:bae6d903-934d-4f98-9924-805cc9b20d5c nodeName:}" failed. No retries permitted until 2025-09-30 12:33:13.087048335 +0000 UTC m=+767.336730491 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/bae6d903-934d-4f98-9924-805cc9b20d5c-memberlist") pod "speaker-tq2q9" (UID: "bae6d903-934d-4f98-9924-805cc9b20d5c") : secret "metallb-memberlist" not found
Sep 30 12:33:12 crc kubenswrapper[5002]: I0930 12:33:12.587757 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/bae6d903-934d-4f98-9924-805cc9b20d5c-metallb-excludel2\") pod \"speaker-tq2q9\" (UID: \"bae6d903-934d-4f98-9924-805cc9b20d5c\") " pod="metallb-system/speaker-tq2q9"
Sep 30 12:33:12 crc kubenswrapper[5002]: I0930 12:33:12.590702 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/9381f980-e2fe-4cf6-97ad-93757413f357-cert\") pod \"controller-5d688f5ffc-4hv4v\" (UID: \"9381f980-e2fe-4cf6-97ad-93757413f357\") " pod="metallb-system/controller-5d688f5ffc-4hv4v"
Sep 30 12:33:12 crc kubenswrapper[5002]: I0930 12:33:12.591164 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/9381f980-e2fe-4cf6-97ad-93757413f357-metrics-certs\") pod \"controller-5d688f5ffc-4hv4v\" (UID: \"9381f980-e2fe-4cf6-97ad-93757413f357\") " pod="metallb-system/controller-5d688f5ffc-4hv4v"
Sep 30 12:33:12 crc kubenswrapper[5002]: I0930 12:33:12.594226 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/bae6d903-934d-4f98-9924-805cc9b20d5c-metrics-certs\") pod \"speaker-tq2q9\" (UID: \"bae6d903-934d-4f98-9924-805cc9b20d5c\") " pod="metallb-system/speaker-tq2q9"
Sep 30 12:33:12 crc kubenswrapper[5002]: I0930 12:33:12.604439 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-96x2b\" (UniqueName: \"kubernetes.io/projected/bae6d903-934d-4f98-9924-805cc9b20d5c-kube-api-access-96x2b\") pod \"speaker-tq2q9\" (UID: \"bae6d903-934d-4f98-9924-805cc9b20d5c\") " pod="metallb-system/speaker-tq2q9"
Sep 30 12:33:12 crc kubenswrapper[5002]: I0930 12:33:12.623013 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f9bwl\" (UniqueName: \"kubernetes.io/projected/9381f980-e2fe-4cf6-97ad-93757413f357-kube-api-access-f9bwl\") pod \"controller-5d688f5ffc-4hv4v\" (UID: \"9381f980-e2fe-4cf6-97ad-93757413f357\") " pod="metallb-system/controller-5d688f5ffc-4hv4v"
Sep 30 12:33:12 crc kubenswrapper[5002]: I0930 12:33:12.640991 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-5478bdb765-j499w"
Sep 30 12:33:12 crc kubenswrapper[5002]: I0930 12:33:12.688725 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/controller-5d688f5ffc-4hv4v"
Sep 30 12:33:13 crc kubenswrapper[5002]: I0930 12:33:13.083324 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-5478bdb765-j499w"]
Sep 30 12:33:13 crc kubenswrapper[5002]: W0930 12:33:13.087428 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod94a678f1_7504_4246_9ce4_23886b1a3623.slice/crio-7a1f38dc0e2a99c04e90ce44fe5dc6d386f779dad14626e4ea856fdde7cdcb16 WatchSource:0}: Error finding container 7a1f38dc0e2a99c04e90ce44fe5dc6d386f779dad14626e4ea856fdde7cdcb16: Status 404 returned error can't find the container with id 7a1f38dc0e2a99c04e90ce44fe5dc6d386f779dad14626e4ea856fdde7cdcb16
Sep 30 12:33:13 crc kubenswrapper[5002]: I0930 12:33:13.093465 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-5478bdb765-j499w" event={"ID":"94a678f1-7504-4246-9ce4-23886b1a3623","Type":"ContainerStarted","Data":"7a1f38dc0e2a99c04e90ce44fe5dc6d386f779dad14626e4ea856fdde7cdcb16"}
Sep 30 12:33:13 crc kubenswrapper[5002]: I0930 12:33:13.095101 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/bae6d903-934d-4f98-9924-805cc9b20d5c-memberlist\") pod \"speaker-tq2q9\" (UID: \"bae6d903-934d-4f98-9924-805cc9b20d5c\") " pod="metallb-system/speaker-tq2q9"
Sep 30 12:33:13 crc kubenswrapper[5002]: E0930 12:33:13.095324 5002 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found
Sep 30 12:33:13 crc kubenswrapper[5002]: E0930 12:33:13.095397 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/bae6d903-934d-4f98-9924-805cc9b20d5c-memberlist podName:bae6d903-934d-4f98-9924-805cc9b20d5c nodeName:}" failed. No retries permitted until 2025-09-30 12:33:14.095377147 +0000 UTC m=+768.345059293 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/bae6d903-934d-4f98-9924-805cc9b20d5c-memberlist") pod "speaker-tq2q9" (UID: "bae6d903-934d-4f98-9924-805cc9b20d5c") : secret "metallb-memberlist" not found
Sep 30 12:33:13 crc kubenswrapper[5002]: I0930 12:33:13.095425 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-fxsmq" event={"ID":"66fde16c-f197-4399-bef6-7ea1d7d41611","Type":"ContainerStarted","Data":"b479ce7a9eaa7973280f63843b8501a0e8c735a5b535217fde2843673f08b11d"}
Sep 30 12:33:13 crc kubenswrapper[5002]: I0930 12:33:13.175773 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-5d688f5ffc-4hv4v"]
Sep 30 12:33:13 crc kubenswrapper[5002]: W0930 12:33:13.182030 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9381f980_e2fe_4cf6_97ad_93757413f357.slice/crio-f2637f2a33570eebf816d672aa7a432c598eec3a0ace1011ff48aeca9bc0316c WatchSource:0}: Error finding container f2637f2a33570eebf816d672aa7a432c598eec3a0ace1011ff48aeca9bc0316c: Status 404 returned error can't find the container with id f2637f2a33570eebf816d672aa7a432c598eec3a0ace1011ff48aeca9bc0316c
Sep 30 12:33:14 crc kubenswrapper[5002]: I0930 12:33:14.102790 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-5d688f5ffc-4hv4v" event={"ID":"9381f980-e2fe-4cf6-97ad-93757413f357","Type":"ContainerStarted","Data":"52babb4a6c04bb55e0699dd61b4ff797acec4607281bbb4803b0064f03ff8e3a"}
Sep 30 12:33:14 crc kubenswrapper[5002]: I0930 12:33:14.103179 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-5d688f5ffc-4hv4v" event={"ID":"9381f980-e2fe-4cf6-97ad-93757413f357","Type":"ContainerStarted","Data":"d8b926d69aa2b3ecbb3ba754454943b8bc79e211810ff71646d971729a4364a6"}
Sep 30 12:33:14 crc kubenswrapper[5002]: I0930 12:33:14.103195 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-5d688f5ffc-4hv4v" event={"ID":"9381f980-e2fe-4cf6-97ad-93757413f357","Type":"ContainerStarted","Data":"f2637f2a33570eebf816d672aa7a432c598eec3a0ace1011ff48aeca9bc0316c"}
Sep 30 12:33:14 crc kubenswrapper[5002]: I0930 12:33:14.103211 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/controller-5d688f5ffc-4hv4v"
Sep 30 12:33:14 crc kubenswrapper[5002]: I0930 12:33:14.109020 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/bae6d903-934d-4f98-9924-805cc9b20d5c-memberlist\") pod \"speaker-tq2q9\" (UID: \"bae6d903-934d-4f98-9924-805cc9b20d5c\") " pod="metallb-system/speaker-tq2q9"
Sep 30 12:33:14 crc kubenswrapper[5002]: I0930 12:33:14.115943 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/bae6d903-934d-4f98-9924-805cc9b20d5c-memberlist\") pod \"speaker-tq2q9\" (UID: \"bae6d903-934d-4f98-9924-805cc9b20d5c\") " pod="metallb-system/speaker-tq2q9"
Sep 30 12:33:14 crc kubenswrapper[5002]: I0930 12:33:14.157261 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/controller-5d688f5ffc-4hv4v" podStartSLOduration=2.15724145 podStartE2EDuration="2.15724145s" podCreationTimestamp="2025-09-30 12:33:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 12:33:14.152408602
+0000 UTC m=+768.402090758" watchObservedRunningTime="2025-09-30 12:33:14.15724145 +0000 UTC m=+768.406923596" Sep 30 12:33:14 crc kubenswrapper[5002]: I0930 12:33:14.199011 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/speaker-tq2q9" Sep 30 12:33:14 crc kubenswrapper[5002]: W0930 12:33:14.218166 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podbae6d903_934d_4f98_9924_805cc9b20d5c.slice/crio-7a4cbdc7f73b39ee10a50bc93d2c6a64c0e8c482e12eb73629e78712699c9c86 WatchSource:0}: Error finding container 7a4cbdc7f73b39ee10a50bc93d2c6a64c0e8c482e12eb73629e78712699c9c86: Status 404 returned error can't find the container with id 7a4cbdc7f73b39ee10a50bc93d2c6a64c0e8c482e12eb73629e78712699c9c86 Sep 30 12:33:15 crc kubenswrapper[5002]: I0930 12:33:15.126566 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-tq2q9" event={"ID":"bae6d903-934d-4f98-9924-805cc9b20d5c","Type":"ContainerStarted","Data":"38d3d6d1d49bdbaeeab61d66e60308c18c611d688f05223e99be0c50a1c4d96c"} Sep 30 12:33:15 crc kubenswrapper[5002]: I0930 12:33:15.126938 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-tq2q9" event={"ID":"bae6d903-934d-4f98-9924-805cc9b20d5c","Type":"ContainerStarted","Data":"c6b8c304d6978c2917d8afff6c15f9e33d74887bda6c9c80899bce52bc9de4e8"} Sep 30 12:33:15 crc kubenswrapper[5002]: I0930 12:33:15.126953 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-tq2q9" event={"ID":"bae6d903-934d-4f98-9924-805cc9b20d5c","Type":"ContainerStarted","Data":"7a4cbdc7f73b39ee10a50bc93d2c6a64c0e8c482e12eb73629e78712699c9c86"} Sep 30 12:33:15 crc kubenswrapper[5002]: I0930 12:33:15.127063 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/speaker-tq2q9" Sep 30 12:33:15 crc kubenswrapper[5002]: I0930 12:33:15.145844 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/speaker-tq2q9" podStartSLOduration=3.145828275 podStartE2EDuration="3.145828275s" podCreationTimestamp="2025-09-30 12:33:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 12:33:15.14488167 +0000 UTC m=+769.394563826" watchObservedRunningTime="2025-09-30 12:33:15.145828275 +0000 UTC m=+769.395510411" Sep 30 12:33:16 crc kubenswrapper[5002]: I0930 12:33:16.271031 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-l64gf"] Sep 30 12:33:16 crc kubenswrapper[5002]: I0930 12:33:16.273787 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-l64gf" Sep 30 12:33:16 crc kubenswrapper[5002]: I0930 12:33:16.285838 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-l64gf"] Sep 30 12:33:16 crc kubenswrapper[5002]: I0930 12:33:16.434933 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f437274b-a5ce-4b34-9dbb-c1c251da80f6-catalog-content\") pod \"redhat-operators-l64gf\" (UID: \"f437274b-a5ce-4b34-9dbb-c1c251da80f6\") " pod="openshift-marketplace/redhat-operators-l64gf" Sep 30 12:33:16 crc kubenswrapper[5002]: I0930 12:33:16.434986 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f437274b-a5ce-4b34-9dbb-c1c251da80f6-utilities\") pod \"redhat-operators-l64gf\" (UID: \"f437274b-a5ce-4b34-9dbb-c1c251da80f6\") " pod="openshift-marketplace/redhat-operators-l64gf" Sep 30 12:33:16 crc kubenswrapper[5002]: I0930 12:33:16.435025 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gwkr8\" (UniqueName: \"kubernetes.io/projected/f437274b-a5ce-4b34-9dbb-c1c251da80f6-kube-api-access-gwkr8\") pod \"redhat-operators-l64gf\" (UID: \"f437274b-a5ce-4b34-9dbb-c1c251da80f6\") " pod="openshift-marketplace/redhat-operators-l64gf" Sep 30 12:33:16 crc kubenswrapper[5002]: I0930 12:33:16.536426 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f437274b-a5ce-4b34-9dbb-c1c251da80f6-catalog-content\") pod \"redhat-operators-l64gf\" (UID: \"f437274b-a5ce-4b34-9dbb-c1c251da80f6\") " pod="openshift-marketplace/redhat-operators-l64gf" Sep 30 12:33:16 crc kubenswrapper[5002]: I0930 12:33:16.536486 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f437274b-a5ce-4b34-9dbb-c1c251da80f6-utilities\") pod \"redhat-operators-l64gf\" (UID: \"f437274b-a5ce-4b34-9dbb-c1c251da80f6\") " pod="openshift-marketplace/redhat-operators-l64gf" Sep 30 12:33:16 crc kubenswrapper[5002]: I0930 12:33:16.536516 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gwkr8\" (UniqueName: \"kubernetes.io/projected/f437274b-a5ce-4b34-9dbb-c1c251da80f6-kube-api-access-gwkr8\") pod \"redhat-operators-l64gf\" (UID: \"f437274b-a5ce-4b34-9dbb-c1c251da80f6\") " pod="openshift-marketplace/redhat-operators-l64gf" Sep 30 12:33:16 crc kubenswrapper[5002]: I0930 12:33:16.536985 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f437274b-a5ce-4b34-9dbb-c1c251da80f6-utilities\") pod \"redhat-operators-l64gf\" (UID: \"f437274b-a5ce-4b34-9dbb-c1c251da80f6\") " pod="openshift-marketplace/redhat-operators-l64gf" Sep 30 12:33:16 crc kubenswrapper[5002]: I0930 12:33:16.537024 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f437274b-a5ce-4b34-9dbb-c1c251da80f6-catalog-content\") pod \"redhat-operators-l64gf\" (UID: \"f437274b-a5ce-4b34-9dbb-c1c251da80f6\") " pod="openshift-marketplace/redhat-operators-l64gf" Sep 30 12:33:16 crc kubenswrapper[5002]: I0930 12:33:16.572413 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-gwkr8\" (UniqueName: \"kubernetes.io/projected/f437274b-a5ce-4b34-9dbb-c1c251da80f6-kube-api-access-gwkr8\") pod \"redhat-operators-l64gf\" (UID: \"f437274b-a5ce-4b34-9dbb-c1c251da80f6\") " pod="openshift-marketplace/redhat-operators-l64gf" Sep 30 12:33:16 crc kubenswrapper[5002]: I0930 12:33:16.589766 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-l64gf" Sep 30 12:33:17 crc kubenswrapper[5002]: I0930 12:33:17.027187 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-l64gf"] Sep 30 12:33:17 crc kubenswrapper[5002]: W0930 12:33:17.044821 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf437274b_a5ce_4b34_9dbb_c1c251da80f6.slice/crio-9d8a538cf75652be8e823801eaf86446a62f941d4624fe9df981984e77dd4a2e WatchSource:0}: Error finding container 9d8a538cf75652be8e823801eaf86446a62f941d4624fe9df981984e77dd4a2e: Status 404 returned error can't find the container with id 9d8a538cf75652be8e823801eaf86446a62f941d4624fe9df981984e77dd4a2e Sep 30 12:33:17 crc kubenswrapper[5002]: I0930 12:33:17.147090 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-l64gf" event={"ID":"f437274b-a5ce-4b34-9dbb-c1c251da80f6","Type":"ContainerStarted","Data":"9d8a538cf75652be8e823801eaf86446a62f941d4624fe9df981984e77dd4a2e"} Sep 30 12:33:18 crc kubenswrapper[5002]: I0930 12:33:18.156307 5002 generic.go:334] "Generic (PLEG): container finished" podID="f437274b-a5ce-4b34-9dbb-c1c251da80f6" containerID="d16ec3727d74f269993bd5976611f1c5120b24ea5de565985cb4c1c6d3d7f060" exitCode=0 Sep 30 12:33:18 crc kubenswrapper[5002]: I0930 12:33:18.156387 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-l64gf" event={"ID":"f437274b-a5ce-4b34-9dbb-c1c251da80f6","Type":"ContainerDied","Data":"d16ec3727d74f269993bd5976611f1c5120b24ea5de565985cb4c1c6d3d7f060"} Sep 30 12:33:21 crc kubenswrapper[5002]: I0930 12:33:21.175980 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-5478bdb765-j499w" event={"ID":"94a678f1-7504-4246-9ce4-23886b1a3623","Type":"ContainerStarted","Data":"e463d3b39110eec5005ccc152c81e5727fd722d46a125eaf16b4129bf09224c5"} Sep 30 12:33:21 crc kubenswrapper[5002]: I0930 12:33:21.177994 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-webhook-server-5478bdb765-j499w" Sep 30 12:33:21 crc kubenswrapper[5002]: I0930 12:33:21.180349 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-l64gf" event={"ID":"f437274b-a5ce-4b34-9dbb-c1c251da80f6","Type":"ContainerStarted","Data":"12639a3535019c532250c7a08ce6f07b5ea37e24cb75536333d39dc7ef294c1d"} Sep 30 12:33:21 crc kubenswrapper[5002]: I0930 12:33:21.183539 5002 generic.go:334] "Generic (PLEG): container finished" podID="66fde16c-f197-4399-bef6-7ea1d7d41611" containerID="796f528afde85524c75a103c098cf5964e5f37ee38fa1c5543c6d1a7bcb4b99f" exitCode=0 Sep 30 12:33:21 crc kubenswrapper[5002]: I0930 12:33:21.183685 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-fxsmq" event={"ID":"66fde16c-f197-4399-bef6-7ea1d7d41611","Type":"ContainerDied","Data":"796f528afde85524c75a103c098cf5964e5f37ee38fa1c5543c6d1a7bcb4b99f"} Sep 30 12:33:21 crc kubenswrapper[5002]: I0930 12:33:21.198178 5002 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-webhook-server-5478bdb765-j499w" podStartSLOduration=1.635843739 podStartE2EDuration="9.198156131s" podCreationTimestamp="2025-09-30 12:33:12 +0000 UTC" firstStartedPulling="2025-09-30 12:33:13.089188061 +0000 UTC m=+767.338870207" lastFinishedPulling="2025-09-30 12:33:20.651500433 +0000 UTC m=+774.901182599" observedRunningTime="2025-09-30 12:33:21.19476596 +0000 UTC m=+775.444448136" watchObservedRunningTime="2025-09-30 12:33:21.198156131 +0000 UTC m=+775.447838287" Sep 30 12:33:22 crc kubenswrapper[5002]: I0930 12:33:22.190121 5002 generic.go:334] "Generic (PLEG): container finished" podID="66fde16c-f197-4399-bef6-7ea1d7d41611" containerID="30d3c2d9f73c5c748d47412dee36fffe70bc90df781f3abf71e887eefae9e375" exitCode=0 Sep 30 12:33:22 crc kubenswrapper[5002]: I0930 12:33:22.190163 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-fxsmq" event={"ID":"66fde16c-f197-4399-bef6-7ea1d7d41611","Type":"ContainerDied","Data":"30d3c2d9f73c5c748d47412dee36fffe70bc90df781f3abf71e887eefae9e375"} Sep 30 12:33:22 crc kubenswrapper[5002]: I0930 12:33:22.191820 5002 generic.go:334] "Generic (PLEG): container finished" podID="f437274b-a5ce-4b34-9dbb-c1c251da80f6" containerID="12639a3535019c532250c7a08ce6f07b5ea37e24cb75536333d39dc7ef294c1d" exitCode=0 Sep 30 12:33:22 crc kubenswrapper[5002]: I0930 12:33:22.191850 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-l64gf" event={"ID":"f437274b-a5ce-4b34-9dbb-c1c251da80f6","Type":"ContainerDied","Data":"12639a3535019c532250c7a08ce6f07b5ea37e24cb75536333d39dc7ef294c1d"} Sep 30 12:33:23 crc kubenswrapper[5002]: I0930 12:33:23.202884 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-l64gf" event={"ID":"f437274b-a5ce-4b34-9dbb-c1c251da80f6","Type":"ContainerStarted","Data":"f56a451ea40832191cc48fdb8c643b819b62ae5bff363f4b7b0cf7db3df39b0d"} Sep 30 12:33:23 crc kubenswrapper[5002]: I0930 12:33:23.206015 5002 generic.go:334] "Generic (PLEG): container finished" podID="66fde16c-f197-4399-bef6-7ea1d7d41611" containerID="dfa15f25bf221272fda79aa0f6ddb27cc6ed5ae6065426b49ba453c4b3c89a48" exitCode=0 Sep 30 12:33:23 crc kubenswrapper[5002]: I0930 12:33:23.206096 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-fxsmq" event={"ID":"66fde16c-f197-4399-bef6-7ea1d7d41611","Type":"ContainerDied","Data":"dfa15f25bf221272fda79aa0f6ddb27cc6ed5ae6065426b49ba453c4b3c89a48"} Sep 30 12:33:23 crc kubenswrapper[5002]: I0930 12:33:23.232297 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-l64gf" podStartSLOduration=2.812441598 podStartE2EDuration="7.232275252s" podCreationTimestamp="2025-09-30 12:33:16 +0000 UTC" firstStartedPulling="2025-09-30 12:33:18.158897353 +0000 UTC m=+772.408579499" lastFinishedPulling="2025-09-30 12:33:22.578730967 +0000 UTC m=+776.828413153" observedRunningTime="2025-09-30 12:33:23.230654838 +0000 UTC m=+777.480337014" watchObservedRunningTime="2025-09-30 12:33:23.232275252 +0000 UTC m=+777.481957438" Sep 30 12:33:23 crc kubenswrapper[5002]: I0930 12:33:23.785313 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-zr7jx"] Sep 30 12:33:23 crc kubenswrapper[5002]: I0930 12:33:23.786920 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-zr7jx" Sep 30 12:33:23 crc kubenswrapper[5002]: I0930 12:33:23.797905 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-zr7jx"] Sep 30 12:33:23 crc kubenswrapper[5002]: I0930 12:33:23.847581 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jkpgn\" (UniqueName: \"kubernetes.io/projected/29e69fc1-323d-4a85-afaf-37a85773d1c0-kube-api-access-jkpgn\") pod \"community-operators-zr7jx\" (UID: \"29e69fc1-323d-4a85-afaf-37a85773d1c0\") " pod="openshift-marketplace/community-operators-zr7jx" Sep 30 12:33:23 crc kubenswrapper[5002]: I0930 12:33:23.847864 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/29e69fc1-323d-4a85-afaf-37a85773d1c0-catalog-content\") pod \"community-operators-zr7jx\" (UID: \"29e69fc1-323d-4a85-afaf-37a85773d1c0\") " pod="openshift-marketplace/community-operators-zr7jx" Sep 30 12:33:23 crc kubenswrapper[5002]: I0930 12:33:23.848062 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/29e69fc1-323d-4a85-afaf-37a85773d1c0-utilities\") pod \"community-operators-zr7jx\" (UID: \"29e69fc1-323d-4a85-afaf-37a85773d1c0\") " pod="openshift-marketplace/community-operators-zr7jx" Sep 30 12:33:23 crc kubenswrapper[5002]: I0930 12:33:23.949782 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/29e69fc1-323d-4a85-afaf-37a85773d1c0-catalog-content\") pod \"community-operators-zr7jx\" (UID: \"29e69fc1-323d-4a85-afaf-37a85773d1c0\") " pod="openshift-marketplace/community-operators-zr7jx" Sep 30 12:33:23 crc kubenswrapper[5002]: I0930 12:33:23.949874 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/29e69fc1-323d-4a85-afaf-37a85773d1c0-utilities\") pod \"community-operators-zr7jx\" (UID: \"29e69fc1-323d-4a85-afaf-37a85773d1c0\") " pod="openshift-marketplace/community-operators-zr7jx" Sep 30 12:33:23 crc kubenswrapper[5002]: I0930 12:33:23.949948 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jkpgn\" (UniqueName: \"kubernetes.io/projected/29e69fc1-323d-4a85-afaf-37a85773d1c0-kube-api-access-jkpgn\") pod \"community-operators-zr7jx\" (UID: \"29e69fc1-323d-4a85-afaf-37a85773d1c0\") " pod="openshift-marketplace/community-operators-zr7jx" Sep 30 12:33:23 crc kubenswrapper[5002]: I0930 12:33:23.950296 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/29e69fc1-323d-4a85-afaf-37a85773d1c0-catalog-content\") pod \"community-operators-zr7jx\" (UID: \"29e69fc1-323d-4a85-afaf-37a85773d1c0\") " pod="openshift-marketplace/community-operators-zr7jx" Sep 30 12:33:23 crc kubenswrapper[5002]: I0930 12:33:23.950697 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/29e69fc1-323d-4a85-afaf-37a85773d1c0-utilities\") pod \"community-operators-zr7jx\" (UID: \"29e69fc1-323d-4a85-afaf-37a85773d1c0\") " pod="openshift-marketplace/community-operators-zr7jx" Sep 30 12:33:23 crc kubenswrapper[5002]: I0930 12:33:23.972456 5002 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-jkpgn\" (UniqueName: \"kubernetes.io/projected/29e69fc1-323d-4a85-afaf-37a85773d1c0-kube-api-access-jkpgn\") pod \"community-operators-zr7jx\" (UID: \"29e69fc1-323d-4a85-afaf-37a85773d1c0\") " pod="openshift-marketplace/community-operators-zr7jx" Sep 30 12:33:24 crc kubenswrapper[5002]: I0930 12:33:24.119353 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-zr7jx" Sep 30 12:33:24 crc kubenswrapper[5002]: I0930 12:33:24.209420 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/speaker-tq2q9" Sep 30 12:33:24 crc kubenswrapper[5002]: I0930 12:33:24.295188 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-fxsmq" event={"ID":"66fde16c-f197-4399-bef6-7ea1d7d41611","Type":"ContainerStarted","Data":"4c96211b266a294688a56f20972b311e0f1799a63914dd6128815a033ae584ab"} Sep 30 12:33:24 crc kubenswrapper[5002]: I0930 12:33:24.295615 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-fxsmq" event={"ID":"66fde16c-f197-4399-bef6-7ea1d7d41611","Type":"ContainerStarted","Data":"e238593c34c9117ecfaa37f50143c8cce734b849c4241f9acf7a98ba88da285a"} Sep 30 12:33:24 crc kubenswrapper[5002]: I0930 12:33:24.295627 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-fxsmq" event={"ID":"66fde16c-f197-4399-bef6-7ea1d7d41611","Type":"ContainerStarted","Data":"e4c9da84598ddf1c55dfa0858a4ad3092be6cd71b461e008949a9b2973d3200d"} Sep 30 12:33:24 crc kubenswrapper[5002]: I0930 12:33:24.295635 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-fxsmq" event={"ID":"66fde16c-f197-4399-bef6-7ea1d7d41611","Type":"ContainerStarted","Data":"b16b268b5627c48976b4abc1c88101a2a15810fcced3af77e4106df5dada9de8"} Sep 30 12:33:24 crc kubenswrapper[5002]: I0930 12:33:24.295643 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-fxsmq" event={"ID":"66fde16c-f197-4399-bef6-7ea1d7d41611","Type":"ContainerStarted","Data":"e52295f1c4a5dc46d515d55effbca174b9caaf8d84f54e345c9505d135351b09"} Sep 30 12:33:24 crc kubenswrapper[5002]: I0930 12:33:24.610983 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-zr7jx"] Sep 30 12:33:25 crc kubenswrapper[5002]: I0930 12:33:25.320710 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-fxsmq" event={"ID":"66fde16c-f197-4399-bef6-7ea1d7d41611","Type":"ContainerStarted","Data":"67defdfd741becdfbfc8f6ca97f8bf3d6908f582e0801a5764f5b7ada4310711"} Sep 30 12:33:25 crc kubenswrapper[5002]: I0930 12:33:25.321599 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-fxsmq" Sep 30 12:33:25 crc kubenswrapper[5002]: I0930 12:33:25.323212 5002 generic.go:334] "Generic (PLEG): container finished" podID="29e69fc1-323d-4a85-afaf-37a85773d1c0" containerID="5f274128889180f1c64c838a2a386f94d61ece14e41b8c7fb0770d8dac710b01" exitCode=0 Sep 30 12:33:25 crc kubenswrapper[5002]: I0930 12:33:25.323253 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zr7jx" event={"ID":"29e69fc1-323d-4a85-afaf-37a85773d1c0","Type":"ContainerDied","Data":"5f274128889180f1c64c838a2a386f94d61ece14e41b8c7fb0770d8dac710b01"} Sep 30 12:33:25 crc kubenswrapper[5002]: I0930 12:33:25.323274 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/community-operators-zr7jx" event={"ID":"29e69fc1-323d-4a85-afaf-37a85773d1c0","Type":"ContainerStarted","Data":"d59fe2861ff9e1ba14a1dd6dbacd821be26e6d4b5d7dccbb7d39f317a790d829"} Sep 30 12:33:25 crc kubenswrapper[5002]: I0930 12:33:25.349411 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-fxsmq" podStartSLOduration=5.443935468 podStartE2EDuration="13.349395705s" podCreationTimestamp="2025-09-30 12:33:12 +0000 UTC" firstStartedPulling="2025-09-30 12:33:12.704548947 +0000 UTC m=+766.954231093" lastFinishedPulling="2025-09-30 12:33:20.610009184 +0000 UTC m=+774.859691330" observedRunningTime="2025-09-30 12:33:25.347534263 +0000 UTC m=+779.597216409" watchObservedRunningTime="2025-09-30 12:33:25.349395705 +0000 UTC m=+779.599077851" Sep 30 12:33:26 crc kubenswrapper[5002]: I0930 12:33:26.337120 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zr7jx" event={"ID":"29e69fc1-323d-4a85-afaf-37a85773d1c0","Type":"ContainerStarted","Data":"d6776124e0aaca87ff471479b57bf415100d3e26389b2bf65a56296d8876ab07"} Sep 30 12:33:26 crc kubenswrapper[5002]: I0930 12:33:26.590416 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-l64gf" Sep 30 12:33:26 crc kubenswrapper[5002]: I0930 12:33:26.590527 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-l64gf" Sep 30 12:33:27 crc kubenswrapper[5002]: I0930 12:33:27.344795 5002 generic.go:334] "Generic (PLEG): container finished" podID="29e69fc1-323d-4a85-afaf-37a85773d1c0" containerID="d6776124e0aaca87ff471479b57bf415100d3e26389b2bf65a56296d8876ab07" exitCode=0 Sep 30 12:33:27 crc kubenswrapper[5002]: I0930 12:33:27.344894 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zr7jx" event={"ID":"29e69fc1-323d-4a85-afaf-37a85773d1c0","Type":"ContainerDied","Data":"d6776124e0aaca87ff471479b57bf415100d3e26389b2bf65a56296d8876ab07"} Sep 30 12:33:27 crc kubenswrapper[5002]: I0930 12:33:27.575712 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="metallb-system/frr-k8s-fxsmq" Sep 30 12:33:27 crc kubenswrapper[5002]: I0930 12:33:27.611558 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="metallb-system/frr-k8s-fxsmq" Sep 30 12:33:27 crc kubenswrapper[5002]: I0930 12:33:27.634915 5002 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-l64gf" podUID="f437274b-a5ce-4b34-9dbb-c1c251da80f6" containerName="registry-server" probeResult="failure" output=< Sep 30 12:33:27 crc kubenswrapper[5002]: timeout: failed to connect service ":50051" within 1s Sep 30 12:33:27 crc kubenswrapper[5002]: > Sep 30 12:33:28 crc kubenswrapper[5002]: I0930 12:33:28.358589 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zr7jx" event={"ID":"29e69fc1-323d-4a85-afaf-37a85773d1c0","Type":"ContainerStarted","Data":"bc5403954c0e9c1d56bc4404a484b1174d45903d903e25bc6e59798a850c1dba"} Sep 30 12:33:28 crc kubenswrapper[5002]: I0930 12:33:28.384309 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-zr7jx" podStartSLOduration=2.958211485 podStartE2EDuration="5.384293616s" podCreationTimestamp="2025-09-30 12:33:23 +0000 UTC" firstStartedPulling="2025-09-30 12:33:25.325988533 +0000 UTC 
m=+779.575670679" lastFinishedPulling="2025-09-30 12:33:27.752070664 +0000 UTC m=+782.001752810" observedRunningTime="2025-09-30 12:33:28.380587714 +0000 UTC m=+782.630269890" watchObservedRunningTime="2025-09-30 12:33:28.384293616 +0000 UTC m=+782.633975762" Sep 30 12:33:30 crc kubenswrapper[5002]: I0930 12:33:30.645857 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-index-gc6rt"] Sep 30 12:33:30 crc kubenswrapper[5002]: I0930 12:33:30.647088 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-gc6rt" Sep 30 12:33:30 crc kubenswrapper[5002]: I0930 12:33:30.649608 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"openshift-service-ca.crt" Sep 30 12:33:30 crc kubenswrapper[5002]: I0930 12:33:30.651206 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-index-dockercfg-6m788" Sep 30 12:33:30 crc kubenswrapper[5002]: I0930 12:33:30.651771 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"kube-root-ca.crt" Sep 30 12:33:30 crc kubenswrapper[5002]: I0930 12:33:30.661951 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-gc6rt"] Sep 30 12:33:30 crc kubenswrapper[5002]: I0930 12:33:30.746192 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tjfks\" (UniqueName: \"kubernetes.io/projected/17eab1ee-1e5f-4092-80e0-77d8a4ca4016-kube-api-access-tjfks\") pod \"openstack-operator-index-gc6rt\" (UID: \"17eab1ee-1e5f-4092-80e0-77d8a4ca4016\") " pod="openstack-operators/openstack-operator-index-gc6rt" Sep 30 12:33:30 crc kubenswrapper[5002]: I0930 12:33:30.847388 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tjfks\" (UniqueName: \"kubernetes.io/projected/17eab1ee-1e5f-4092-80e0-77d8a4ca4016-kube-api-access-tjfks\") pod \"openstack-operator-index-gc6rt\" (UID: \"17eab1ee-1e5f-4092-80e0-77d8a4ca4016\") " pod="openstack-operators/openstack-operator-index-gc6rt" Sep 30 12:33:30 crc kubenswrapper[5002]: I0930 12:33:30.881293 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tjfks\" (UniqueName: \"kubernetes.io/projected/17eab1ee-1e5f-4092-80e0-77d8a4ca4016-kube-api-access-tjfks\") pod \"openstack-operator-index-gc6rt\" (UID: \"17eab1ee-1e5f-4092-80e0-77d8a4ca4016\") " pod="openstack-operators/openstack-operator-index-gc6rt" Sep 30 12:33:30 crc kubenswrapper[5002]: I0930 12:33:30.974739 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-gc6rt" Sep 30 12:33:31 crc kubenswrapper[5002]: I0930 12:33:31.485921 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-gc6rt"] Sep 30 12:33:32 crc kubenswrapper[5002]: I0930 12:33:32.382238 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-gc6rt" event={"ID":"17eab1ee-1e5f-4092-80e0-77d8a4ca4016","Type":"ContainerStarted","Data":"8f3731783a657d0bf22087e2d2e076054d3389d6704edf11cf84d9a297fd07c5"} Sep 30 12:33:32 crc kubenswrapper[5002]: I0930 12:33:32.649981 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-webhook-server-5478bdb765-j499w" Sep 30 12:33:32 crc kubenswrapper[5002]: I0930 12:33:32.695275 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/controller-5d688f5ffc-4hv4v" Sep 30 12:33:34 crc kubenswrapper[5002]: I0930 12:33:34.120538 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-zr7jx" Sep 30 12:33:34 crc kubenswrapper[5002]: I0930 12:33:34.120582 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-zr7jx" Sep 30 12:33:34 crc kubenswrapper[5002]: I0930 12:33:34.182402 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-zr7jx" Sep 30 12:33:34 crc kubenswrapper[5002]: I0930 12:33:34.429765 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-zr7jx" Sep 30 12:33:35 crc kubenswrapper[5002]: I0930 12:33:35.844166 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-j7hhf"] Sep 30 12:33:35 crc kubenswrapper[5002]: I0930 12:33:35.845538 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-j7hhf" Sep 30 12:33:35 crc kubenswrapper[5002]: I0930 12:33:35.864887 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-j7hhf"] Sep 30 12:33:35 crc kubenswrapper[5002]: I0930 12:33:35.914377 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nwp8q\" (UniqueName: \"kubernetes.io/projected/82bc8b10-9d74-440f-8e1a-57d5153d059a-kube-api-access-nwp8q\") pod \"certified-operators-j7hhf\" (UID: \"82bc8b10-9d74-440f-8e1a-57d5153d059a\") " pod="openshift-marketplace/certified-operators-j7hhf" Sep 30 12:33:35 crc kubenswrapper[5002]: I0930 12:33:35.914583 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/82bc8b10-9d74-440f-8e1a-57d5153d059a-utilities\") pod \"certified-operators-j7hhf\" (UID: \"82bc8b10-9d74-440f-8e1a-57d5153d059a\") " pod="openshift-marketplace/certified-operators-j7hhf" Sep 30 12:33:35 crc kubenswrapper[5002]: I0930 12:33:35.914635 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/82bc8b10-9d74-440f-8e1a-57d5153d059a-catalog-content\") pod \"certified-operators-j7hhf\" (UID: \"82bc8b10-9d74-440f-8e1a-57d5153d059a\") " pod="openshift-marketplace/certified-operators-j7hhf" Sep 30 12:33:36 crc kubenswrapper[5002]: I0930 12:33:36.016032 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/82bc8b10-9d74-440f-8e1a-57d5153d059a-utilities\") pod \"certified-operators-j7hhf\" (UID: \"82bc8b10-9d74-440f-8e1a-57d5153d059a\") " pod="openshift-marketplace/certified-operators-j7hhf" Sep 30 12:33:36 crc kubenswrapper[5002]: I0930 12:33:36.016094 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/82bc8b10-9d74-440f-8e1a-57d5153d059a-catalog-content\") pod \"certified-operators-j7hhf\" (UID: \"82bc8b10-9d74-440f-8e1a-57d5153d059a\") " pod="openshift-marketplace/certified-operators-j7hhf" Sep 30 12:33:36 crc kubenswrapper[5002]: I0930 12:33:36.016164 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nwp8q\" (UniqueName: \"kubernetes.io/projected/82bc8b10-9d74-440f-8e1a-57d5153d059a-kube-api-access-nwp8q\") pod \"certified-operators-j7hhf\" (UID: \"82bc8b10-9d74-440f-8e1a-57d5153d059a\") " pod="openshift-marketplace/certified-operators-j7hhf" Sep 30 12:33:36 crc kubenswrapper[5002]: I0930 12:33:36.016724 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/82bc8b10-9d74-440f-8e1a-57d5153d059a-utilities\") pod \"certified-operators-j7hhf\" (UID: \"82bc8b10-9d74-440f-8e1a-57d5153d059a\") " pod="openshift-marketplace/certified-operators-j7hhf" Sep 30 12:33:36 crc kubenswrapper[5002]: I0930 12:33:36.016756 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/82bc8b10-9d74-440f-8e1a-57d5153d059a-catalog-content\") pod \"certified-operators-j7hhf\" (UID: \"82bc8b10-9d74-440f-8e1a-57d5153d059a\") " pod="openshift-marketplace/certified-operators-j7hhf" Sep 30 12:33:36 crc kubenswrapper[5002]: I0930 12:33:36.054993 5002 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-nwp8q\" (UniqueName: \"kubernetes.io/projected/82bc8b10-9d74-440f-8e1a-57d5153d059a-kube-api-access-nwp8q\") pod \"certified-operators-j7hhf\" (UID: \"82bc8b10-9d74-440f-8e1a-57d5153d059a\") " pod="openshift-marketplace/certified-operators-j7hhf" Sep 30 12:33:36 crc kubenswrapper[5002]: I0930 12:33:36.165031 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-j7hhf" Sep 30 12:33:37 crc kubenswrapper[5002]: I0930 12:33:36.629799 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-l64gf" Sep 30 12:33:37 crc kubenswrapper[5002]: I0930 12:33:36.673859 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-l64gf" Sep 30 12:33:37 crc kubenswrapper[5002]: I0930 12:33:36.824565 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-j7hhf"] Sep 30 12:33:37 crc kubenswrapper[5002]: I0930 12:33:37.412141 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-gc6rt" event={"ID":"17eab1ee-1e5f-4092-80e0-77d8a4ca4016","Type":"ContainerStarted","Data":"45e59863695e1760b4e00307a18cead91d6b8c7c0113abe0503d53524856206f"} Sep 30 12:33:37 crc kubenswrapper[5002]: I0930 12:33:37.413837 5002 generic.go:334] "Generic (PLEG): container finished" podID="82bc8b10-9d74-440f-8e1a-57d5153d059a" containerID="6d111258697baaa574b42e1081557ee109f37e825eeb9dec787480e442b1ac3a" exitCode=0 Sep 30 12:33:37 crc kubenswrapper[5002]: I0930 12:33:37.414787 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-j7hhf" event={"ID":"82bc8b10-9d74-440f-8e1a-57d5153d059a","Type":"ContainerDied","Data":"6d111258697baaa574b42e1081557ee109f37e825eeb9dec787480e442b1ac3a"} Sep 30 12:33:37 crc kubenswrapper[5002]: I0930 12:33:37.414858 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-j7hhf" event={"ID":"82bc8b10-9d74-440f-8e1a-57d5153d059a","Type":"ContainerStarted","Data":"6f7530c1e1ede63ce5f521f1b6532b4f3fd1ec377f9dfa9368c9bcfa9b06d304"} Sep 30 12:33:37 crc kubenswrapper[5002]: I0930 12:33:37.431921 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-index-gc6rt" podStartSLOduration=2.509441416 podStartE2EDuration="7.431885003s" podCreationTimestamp="2025-09-30 12:33:30 +0000 UTC" firstStartedPulling="2025-09-30 12:33:31.497124226 +0000 UTC m=+785.746806392" lastFinishedPulling="2025-09-30 12:33:36.419567833 +0000 UTC m=+790.669249979" observedRunningTime="2025-09-30 12:33:37.428642433 +0000 UTC m=+791.678324599" watchObservedRunningTime="2025-09-30 12:33:37.431885003 +0000 UTC m=+791.681567229" Sep 30 12:33:38 crc kubenswrapper[5002]: I0930 12:33:38.038672 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-zr7jx"] Sep 30 12:33:38 crc kubenswrapper[5002]: I0930 12:33:38.039034 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-zr7jx" podUID="29e69fc1-323d-4a85-afaf-37a85773d1c0" containerName="registry-server" containerID="cri-o://bc5403954c0e9c1d56bc4404a484b1174d45903d903e25bc6e59798a850c1dba" gracePeriod=2 Sep 30 12:33:38 crc kubenswrapper[5002]: I0930 12:33:38.426874 5002 generic.go:334] "Generic (PLEG): 
container finished" podID="29e69fc1-323d-4a85-afaf-37a85773d1c0" containerID="bc5403954c0e9c1d56bc4404a484b1174d45903d903e25bc6e59798a850c1dba" exitCode=0 Sep 30 12:33:38 crc kubenswrapper[5002]: I0930 12:33:38.426956 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zr7jx" event={"ID":"29e69fc1-323d-4a85-afaf-37a85773d1c0","Type":"ContainerDied","Data":"bc5403954c0e9c1d56bc4404a484b1174d45903d903e25bc6e59798a850c1dba"} Sep 30 12:33:38 crc kubenswrapper[5002]: I0930 12:33:38.467155 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-zr7jx" Sep 30 12:33:38 crc kubenswrapper[5002]: I0930 12:33:38.658208 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jkpgn\" (UniqueName: \"kubernetes.io/projected/29e69fc1-323d-4a85-afaf-37a85773d1c0-kube-api-access-jkpgn\") pod \"29e69fc1-323d-4a85-afaf-37a85773d1c0\" (UID: \"29e69fc1-323d-4a85-afaf-37a85773d1c0\") " Sep 30 12:33:38 crc kubenswrapper[5002]: I0930 12:33:38.658319 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/29e69fc1-323d-4a85-afaf-37a85773d1c0-utilities\") pod \"29e69fc1-323d-4a85-afaf-37a85773d1c0\" (UID: \"29e69fc1-323d-4a85-afaf-37a85773d1c0\") " Sep 30 12:33:38 crc kubenswrapper[5002]: I0930 12:33:38.658397 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/29e69fc1-323d-4a85-afaf-37a85773d1c0-catalog-content\") pod \"29e69fc1-323d-4a85-afaf-37a85773d1c0\" (UID: \"29e69fc1-323d-4a85-afaf-37a85773d1c0\") " Sep 30 12:33:38 crc kubenswrapper[5002]: I0930 12:33:38.659223 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/29e69fc1-323d-4a85-afaf-37a85773d1c0-utilities" (OuterVolumeSpecName: "utilities") pod "29e69fc1-323d-4a85-afaf-37a85773d1c0" (UID: "29e69fc1-323d-4a85-afaf-37a85773d1c0"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 12:33:38 crc kubenswrapper[5002]: I0930 12:33:38.663014 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/29e69fc1-323d-4a85-afaf-37a85773d1c0-kube-api-access-jkpgn" (OuterVolumeSpecName: "kube-api-access-jkpgn") pod "29e69fc1-323d-4a85-afaf-37a85773d1c0" (UID: "29e69fc1-323d-4a85-afaf-37a85773d1c0"). InnerVolumeSpecName "kube-api-access-jkpgn". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 12:33:38 crc kubenswrapper[5002]: I0930 12:33:38.704039 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/29e69fc1-323d-4a85-afaf-37a85773d1c0-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "29e69fc1-323d-4a85-afaf-37a85773d1c0" (UID: "29e69fc1-323d-4a85-afaf-37a85773d1c0"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 12:33:38 crc kubenswrapper[5002]: I0930 12:33:38.759289 5002 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/29e69fc1-323d-4a85-afaf-37a85773d1c0-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 12:33:38 crc kubenswrapper[5002]: I0930 12:33:38.759318 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jkpgn\" (UniqueName: \"kubernetes.io/projected/29e69fc1-323d-4a85-afaf-37a85773d1c0-kube-api-access-jkpgn\") on node \"crc\" DevicePath \"\"" Sep 30 12:33:38 crc kubenswrapper[5002]: I0930 12:33:38.759328 5002 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/29e69fc1-323d-4a85-afaf-37a85773d1c0-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 12:33:39 crc kubenswrapper[5002]: I0930 12:33:39.440723 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-zr7jx" Sep 30 12:33:39 crc kubenswrapper[5002]: I0930 12:33:39.440730 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zr7jx" event={"ID":"29e69fc1-323d-4a85-afaf-37a85773d1c0","Type":"ContainerDied","Data":"d59fe2861ff9e1ba14a1dd6dbacd821be26e6d4b5d7dccbb7d39f317a790d829"} Sep 30 12:33:39 crc kubenswrapper[5002]: I0930 12:33:39.441338 5002 scope.go:117] "RemoveContainer" containerID="bc5403954c0e9c1d56bc4404a484b1174d45903d903e25bc6e59798a850c1dba" Sep 30 12:33:39 crc kubenswrapper[5002]: I0930 12:33:39.445257 5002 generic.go:334] "Generic (PLEG): container finished" podID="82bc8b10-9d74-440f-8e1a-57d5153d059a" containerID="0de6729627aee82c1abe9e935387d60c54e182bfdaefff82b5eebf68e735c838" exitCode=0 Sep 30 12:33:39 crc kubenswrapper[5002]: I0930 12:33:39.445340 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-j7hhf" event={"ID":"82bc8b10-9d74-440f-8e1a-57d5153d059a","Type":"ContainerDied","Data":"0de6729627aee82c1abe9e935387d60c54e182bfdaefff82b5eebf68e735c838"} Sep 30 12:33:39 crc kubenswrapper[5002]: I0930 12:33:39.469832 5002 scope.go:117] "RemoveContainer" containerID="d6776124e0aaca87ff471479b57bf415100d3e26389b2bf65a56296d8876ab07" Sep 30 12:33:39 crc kubenswrapper[5002]: I0930 12:33:39.506653 5002 scope.go:117] "RemoveContainer" containerID="5f274128889180f1c64c838a2a386f94d61ece14e41b8c7fb0770d8dac710b01" Sep 30 12:33:39 crc kubenswrapper[5002]: I0930 12:33:39.512559 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-zr7jx"] Sep 30 12:33:39 crc kubenswrapper[5002]: I0930 12:33:39.526216 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-zr7jx"] Sep 30 12:33:40 crc kubenswrapper[5002]: I0930 12:33:40.691527 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="29e69fc1-323d-4a85-afaf-37a85773d1c0" path="/var/lib/kubelet/pods/29e69fc1-323d-4a85-afaf-37a85773d1c0/volumes" Sep 30 12:33:40 crc kubenswrapper[5002]: I0930 12:33:40.974887 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-index-gc6rt" Sep 30 12:33:40 crc kubenswrapper[5002]: I0930 12:33:40.974941 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-operators/openstack-operator-index-gc6rt" Sep 30 12:33:41 crc kubenswrapper[5002]: I0930 12:33:41.014905 5002 
kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-operators/openstack-operator-index-gc6rt" Sep 30 12:33:41 crc kubenswrapper[5002]: I0930 12:33:41.469993 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-j7hhf" event={"ID":"82bc8b10-9d74-440f-8e1a-57d5153d059a","Type":"ContainerStarted","Data":"59a8b31dae674dfe6c098a46a7dd4219e14aa61fcde4ec220e1b471fdcdda537"} Sep 30 12:33:41 crc kubenswrapper[5002]: I0930 12:33:41.494763 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-j7hhf" podStartSLOduration=3.541878201 podStartE2EDuration="6.49474266s" podCreationTimestamp="2025-09-30 12:33:35 +0000 UTC" firstStartedPulling="2025-09-30 12:33:37.415426514 +0000 UTC m=+791.665108660" lastFinishedPulling="2025-09-30 12:33:40.368290933 +0000 UTC m=+794.617973119" observedRunningTime="2025-09-30 12:33:41.493795593 +0000 UTC m=+795.743477779" watchObservedRunningTime="2025-09-30 12:33:41.49474266 +0000 UTC m=+795.744424816" Sep 30 12:33:41 crc kubenswrapper[5002]: I0930 12:33:41.512812 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-index-gc6rt" Sep 30 12:33:41 crc kubenswrapper[5002]: I0930 12:33:41.835311 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-l64gf"] Sep 30 12:33:41 crc kubenswrapper[5002]: I0930 12:33:41.835657 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-l64gf" podUID="f437274b-a5ce-4b34-9dbb-c1c251da80f6" containerName="registry-server" containerID="cri-o://f56a451ea40832191cc48fdb8c643b819b62ae5bff363f4b7b0cf7db3df39b0d" gracePeriod=2 Sep 30 12:33:42 crc kubenswrapper[5002]: I0930 12:33:42.255346 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-l64gf" Sep 30 12:33:42 crc kubenswrapper[5002]: I0930 12:33:42.408631 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f437274b-a5ce-4b34-9dbb-c1c251da80f6-catalog-content\") pod \"f437274b-a5ce-4b34-9dbb-c1c251da80f6\" (UID: \"f437274b-a5ce-4b34-9dbb-c1c251da80f6\") " Sep 30 12:33:42 crc kubenswrapper[5002]: I0930 12:33:42.408741 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f437274b-a5ce-4b34-9dbb-c1c251da80f6-utilities\") pod \"f437274b-a5ce-4b34-9dbb-c1c251da80f6\" (UID: \"f437274b-a5ce-4b34-9dbb-c1c251da80f6\") " Sep 30 12:33:42 crc kubenswrapper[5002]: I0930 12:33:42.408847 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gwkr8\" (UniqueName: \"kubernetes.io/projected/f437274b-a5ce-4b34-9dbb-c1c251da80f6-kube-api-access-gwkr8\") pod \"f437274b-a5ce-4b34-9dbb-c1c251da80f6\" (UID: \"f437274b-a5ce-4b34-9dbb-c1c251da80f6\") " Sep 30 12:33:42 crc kubenswrapper[5002]: I0930 12:33:42.409754 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f437274b-a5ce-4b34-9dbb-c1c251da80f6-utilities" (OuterVolumeSpecName: "utilities") pod "f437274b-a5ce-4b34-9dbb-c1c251da80f6" (UID: "f437274b-a5ce-4b34-9dbb-c1c251da80f6"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 12:33:42 crc kubenswrapper[5002]: I0930 12:33:42.413875 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f437274b-a5ce-4b34-9dbb-c1c251da80f6-kube-api-access-gwkr8" (OuterVolumeSpecName: "kube-api-access-gwkr8") pod "f437274b-a5ce-4b34-9dbb-c1c251da80f6" (UID: "f437274b-a5ce-4b34-9dbb-c1c251da80f6"). InnerVolumeSpecName "kube-api-access-gwkr8". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 12:33:42 crc kubenswrapper[5002]: I0930 12:33:42.477091 5002 generic.go:334] "Generic (PLEG): container finished" podID="f437274b-a5ce-4b34-9dbb-c1c251da80f6" containerID="f56a451ea40832191cc48fdb8c643b819b62ae5bff363f4b7b0cf7db3df39b0d" exitCode=0 Sep 30 12:33:42 crc kubenswrapper[5002]: I0930 12:33:42.477171 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-l64gf" Sep 30 12:33:42 crc kubenswrapper[5002]: I0930 12:33:42.477180 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-l64gf" event={"ID":"f437274b-a5ce-4b34-9dbb-c1c251da80f6","Type":"ContainerDied","Data":"f56a451ea40832191cc48fdb8c643b819b62ae5bff363f4b7b0cf7db3df39b0d"} Sep 30 12:33:42 crc kubenswrapper[5002]: I0930 12:33:42.477614 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-l64gf" event={"ID":"f437274b-a5ce-4b34-9dbb-c1c251da80f6","Type":"ContainerDied","Data":"9d8a538cf75652be8e823801eaf86446a62f941d4624fe9df981984e77dd4a2e"} Sep 30 12:33:42 crc kubenswrapper[5002]: I0930 12:33:42.477650 5002 scope.go:117] "RemoveContainer" containerID="f56a451ea40832191cc48fdb8c643b819b62ae5bff363f4b7b0cf7db3df39b0d" Sep 30 12:33:42 crc kubenswrapper[5002]: I0930 12:33:42.494053 5002 scope.go:117] "RemoveContainer" containerID="12639a3535019c532250c7a08ce6f07b5ea37e24cb75536333d39dc7ef294c1d" Sep 30 12:33:42 crc kubenswrapper[5002]: I0930 12:33:42.496000 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f437274b-a5ce-4b34-9dbb-c1c251da80f6-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "f437274b-a5ce-4b34-9dbb-c1c251da80f6" (UID: "f437274b-a5ce-4b34-9dbb-c1c251da80f6"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 12:33:42 crc kubenswrapper[5002]: I0930 12:33:42.510462 5002 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f437274b-a5ce-4b34-9dbb-c1c251da80f6-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 12:33:42 crc kubenswrapper[5002]: I0930 12:33:42.510506 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gwkr8\" (UniqueName: \"kubernetes.io/projected/f437274b-a5ce-4b34-9dbb-c1c251da80f6-kube-api-access-gwkr8\") on node \"crc\" DevicePath \"\"" Sep 30 12:33:42 crc kubenswrapper[5002]: I0930 12:33:42.510519 5002 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f437274b-a5ce-4b34-9dbb-c1c251da80f6-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 12:33:42 crc kubenswrapper[5002]: I0930 12:33:42.510680 5002 scope.go:117] "RemoveContainer" containerID="d16ec3727d74f269993bd5976611f1c5120b24ea5de565985cb4c1c6d3d7f060" Sep 30 12:33:42 crc kubenswrapper[5002]: I0930 12:33:42.528170 5002 scope.go:117] "RemoveContainer" containerID="f56a451ea40832191cc48fdb8c643b819b62ae5bff363f4b7b0cf7db3df39b0d" Sep 30 12:33:42 crc kubenswrapper[5002]: E0930 12:33:42.528523 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f56a451ea40832191cc48fdb8c643b819b62ae5bff363f4b7b0cf7db3df39b0d\": container with ID starting with f56a451ea40832191cc48fdb8c643b819b62ae5bff363f4b7b0cf7db3df39b0d not found: ID does not exist" containerID="f56a451ea40832191cc48fdb8c643b819b62ae5bff363f4b7b0cf7db3df39b0d" Sep 30 12:33:42 crc kubenswrapper[5002]: I0930 12:33:42.528552 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f56a451ea40832191cc48fdb8c643b819b62ae5bff363f4b7b0cf7db3df39b0d"} err="failed to get container status \"f56a451ea40832191cc48fdb8c643b819b62ae5bff363f4b7b0cf7db3df39b0d\": rpc error: code = NotFound desc = could not find container \"f56a451ea40832191cc48fdb8c643b819b62ae5bff363f4b7b0cf7db3df39b0d\": container with ID starting with f56a451ea40832191cc48fdb8c643b819b62ae5bff363f4b7b0cf7db3df39b0d not found: ID does not exist" Sep 30 12:33:42 crc kubenswrapper[5002]: I0930 12:33:42.528579 5002 scope.go:117] "RemoveContainer" containerID="12639a3535019c532250c7a08ce6f07b5ea37e24cb75536333d39dc7ef294c1d" Sep 30 12:33:42 crc kubenswrapper[5002]: E0930 12:33:42.529901 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"12639a3535019c532250c7a08ce6f07b5ea37e24cb75536333d39dc7ef294c1d\": container with ID starting with 12639a3535019c532250c7a08ce6f07b5ea37e24cb75536333d39dc7ef294c1d not found: ID does not exist" containerID="12639a3535019c532250c7a08ce6f07b5ea37e24cb75536333d39dc7ef294c1d" Sep 30 12:33:42 crc kubenswrapper[5002]: I0930 12:33:42.529931 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"12639a3535019c532250c7a08ce6f07b5ea37e24cb75536333d39dc7ef294c1d"} err="failed to get container status \"12639a3535019c532250c7a08ce6f07b5ea37e24cb75536333d39dc7ef294c1d\": rpc error: code = NotFound desc = could not find container \"12639a3535019c532250c7a08ce6f07b5ea37e24cb75536333d39dc7ef294c1d\": container with ID starting with 12639a3535019c532250c7a08ce6f07b5ea37e24cb75536333d39dc7ef294c1d not found: ID does not exist" Sep 30 12:33:42 crc 
kubenswrapper[5002]: I0930 12:33:42.529960 5002 scope.go:117] "RemoveContainer" containerID="d16ec3727d74f269993bd5976611f1c5120b24ea5de565985cb4c1c6d3d7f060" Sep 30 12:33:42 crc kubenswrapper[5002]: E0930 12:33:42.530233 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d16ec3727d74f269993bd5976611f1c5120b24ea5de565985cb4c1c6d3d7f060\": container with ID starting with d16ec3727d74f269993bd5976611f1c5120b24ea5de565985cb4c1c6d3d7f060 not found: ID does not exist" containerID="d16ec3727d74f269993bd5976611f1c5120b24ea5de565985cb4c1c6d3d7f060" Sep 30 12:33:42 crc kubenswrapper[5002]: I0930 12:33:42.530277 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d16ec3727d74f269993bd5976611f1c5120b24ea5de565985cb4c1c6d3d7f060"} err="failed to get container status \"d16ec3727d74f269993bd5976611f1c5120b24ea5de565985cb4c1c6d3d7f060\": rpc error: code = NotFound desc = could not find container \"d16ec3727d74f269993bd5976611f1c5120b24ea5de565985cb4c1c6d3d7f060\": container with ID starting with d16ec3727d74f269993bd5976611f1c5120b24ea5de565985cb4c1c6d3d7f060 not found: ID does not exist" Sep 30 12:33:42 crc kubenswrapper[5002]: I0930 12:33:42.578050 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-fxsmq" Sep 30 12:33:42 crc kubenswrapper[5002]: I0930 12:33:42.792382 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-l64gf"] Sep 30 12:33:42 crc kubenswrapper[5002]: I0930 12:33:42.795521 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-l64gf"] Sep 30 12:33:44 crc kubenswrapper[5002]: I0930 12:33:44.682715 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f437274b-a5ce-4b34-9dbb-c1c251da80f6" path="/var/lib/kubelet/pods/f437274b-a5ce-4b34-9dbb-c1c251da80f6/volumes" Sep 30 12:33:45 crc kubenswrapper[5002]: I0930 12:33:45.492295 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/fcb715ece9f005e1a03c130eeea8b9b209953c0686aefe28df3e5ad2fftkcgf"] Sep 30 12:33:45 crc kubenswrapper[5002]: E0930 12:33:45.492536 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f437274b-a5ce-4b34-9dbb-c1c251da80f6" containerName="extract-utilities" Sep 30 12:33:45 crc kubenswrapper[5002]: I0930 12:33:45.492551 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="f437274b-a5ce-4b34-9dbb-c1c251da80f6" containerName="extract-utilities" Sep 30 12:33:45 crc kubenswrapper[5002]: E0930 12:33:45.492566 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="29e69fc1-323d-4a85-afaf-37a85773d1c0" containerName="registry-server" Sep 30 12:33:45 crc kubenswrapper[5002]: I0930 12:33:45.492573 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="29e69fc1-323d-4a85-afaf-37a85773d1c0" containerName="registry-server" Sep 30 12:33:45 crc kubenswrapper[5002]: E0930 12:33:45.492586 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="29e69fc1-323d-4a85-afaf-37a85773d1c0" containerName="extract-utilities" Sep 30 12:33:45 crc kubenswrapper[5002]: I0930 12:33:45.492592 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="29e69fc1-323d-4a85-afaf-37a85773d1c0" containerName="extract-utilities" Sep 30 12:33:45 crc kubenswrapper[5002]: E0930 12:33:45.492601 5002 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="f437274b-a5ce-4b34-9dbb-c1c251da80f6" containerName="registry-server" Sep 30 12:33:45 crc kubenswrapper[5002]: I0930 12:33:45.492606 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="f437274b-a5ce-4b34-9dbb-c1c251da80f6" containerName="registry-server" Sep 30 12:33:45 crc kubenswrapper[5002]: E0930 12:33:45.492618 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f437274b-a5ce-4b34-9dbb-c1c251da80f6" containerName="extract-content" Sep 30 12:33:45 crc kubenswrapper[5002]: I0930 12:33:45.492624 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="f437274b-a5ce-4b34-9dbb-c1c251da80f6" containerName="extract-content" Sep 30 12:33:45 crc kubenswrapper[5002]: E0930 12:33:45.492632 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="29e69fc1-323d-4a85-afaf-37a85773d1c0" containerName="extract-content" Sep 30 12:33:45 crc kubenswrapper[5002]: I0930 12:33:45.492638 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="29e69fc1-323d-4a85-afaf-37a85773d1c0" containerName="extract-content" Sep 30 12:33:45 crc kubenswrapper[5002]: I0930 12:33:45.492737 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="f437274b-a5ce-4b34-9dbb-c1c251da80f6" containerName="registry-server" Sep 30 12:33:45 crc kubenswrapper[5002]: I0930 12:33:45.492750 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="29e69fc1-323d-4a85-afaf-37a85773d1c0" containerName="registry-server" Sep 30 12:33:45 crc kubenswrapper[5002]: I0930 12:33:45.493552 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/fcb715ece9f005e1a03c130eeea8b9b209953c0686aefe28df3e5ad2fftkcgf" Sep 30 12:33:45 crc kubenswrapper[5002]: I0930 12:33:45.495390 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"default-dockercfg-hjjks" Sep 30 12:33:45 crc kubenswrapper[5002]: I0930 12:33:45.512754 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/fcb715ece9f005e1a03c130eeea8b9b209953c0686aefe28df3e5ad2fftkcgf"] Sep 30 12:33:45 crc kubenswrapper[5002]: I0930 12:33:45.660067 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/bd5d2600-fe6f-407b-8110-97516c3117cb-bundle\") pod \"fcb715ece9f005e1a03c130eeea8b9b209953c0686aefe28df3e5ad2fftkcgf\" (UID: \"bd5d2600-fe6f-407b-8110-97516c3117cb\") " pod="openstack-operators/fcb715ece9f005e1a03c130eeea8b9b209953c0686aefe28df3e5ad2fftkcgf" Sep 30 12:33:45 crc kubenswrapper[5002]: I0930 12:33:45.660132 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/bd5d2600-fe6f-407b-8110-97516c3117cb-util\") pod \"fcb715ece9f005e1a03c130eeea8b9b209953c0686aefe28df3e5ad2fftkcgf\" (UID: \"bd5d2600-fe6f-407b-8110-97516c3117cb\") " pod="openstack-operators/fcb715ece9f005e1a03c130eeea8b9b209953c0686aefe28df3e5ad2fftkcgf" Sep 30 12:33:45 crc kubenswrapper[5002]: I0930 12:33:45.660232 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qfnm8\" (UniqueName: \"kubernetes.io/projected/bd5d2600-fe6f-407b-8110-97516c3117cb-kube-api-access-qfnm8\") pod \"fcb715ece9f005e1a03c130eeea8b9b209953c0686aefe28df3e5ad2fftkcgf\" (UID: \"bd5d2600-fe6f-407b-8110-97516c3117cb\") " pod="openstack-operators/fcb715ece9f005e1a03c130eeea8b9b209953c0686aefe28df3e5ad2fftkcgf" Sep 30 12:33:45 crc 
kubenswrapper[5002]: I0930 12:33:45.761299 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/bd5d2600-fe6f-407b-8110-97516c3117cb-bundle\") pod \"fcb715ece9f005e1a03c130eeea8b9b209953c0686aefe28df3e5ad2fftkcgf\" (UID: \"bd5d2600-fe6f-407b-8110-97516c3117cb\") " pod="openstack-operators/fcb715ece9f005e1a03c130eeea8b9b209953c0686aefe28df3e5ad2fftkcgf" Sep 30 12:33:45 crc kubenswrapper[5002]: I0930 12:33:45.761404 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/bd5d2600-fe6f-407b-8110-97516c3117cb-util\") pod \"fcb715ece9f005e1a03c130eeea8b9b209953c0686aefe28df3e5ad2fftkcgf\" (UID: \"bd5d2600-fe6f-407b-8110-97516c3117cb\") " pod="openstack-operators/fcb715ece9f005e1a03c130eeea8b9b209953c0686aefe28df3e5ad2fftkcgf" Sep 30 12:33:45 crc kubenswrapper[5002]: I0930 12:33:45.761561 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qfnm8\" (UniqueName: \"kubernetes.io/projected/bd5d2600-fe6f-407b-8110-97516c3117cb-kube-api-access-qfnm8\") pod \"fcb715ece9f005e1a03c130eeea8b9b209953c0686aefe28df3e5ad2fftkcgf\" (UID: \"bd5d2600-fe6f-407b-8110-97516c3117cb\") " pod="openstack-operators/fcb715ece9f005e1a03c130eeea8b9b209953c0686aefe28df3e5ad2fftkcgf" Sep 30 12:33:45 crc kubenswrapper[5002]: I0930 12:33:45.761875 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/bd5d2600-fe6f-407b-8110-97516c3117cb-bundle\") pod \"fcb715ece9f005e1a03c130eeea8b9b209953c0686aefe28df3e5ad2fftkcgf\" (UID: \"bd5d2600-fe6f-407b-8110-97516c3117cb\") " pod="openstack-operators/fcb715ece9f005e1a03c130eeea8b9b209953c0686aefe28df3e5ad2fftkcgf" Sep 30 12:33:45 crc kubenswrapper[5002]: I0930 12:33:45.761939 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/bd5d2600-fe6f-407b-8110-97516c3117cb-util\") pod \"fcb715ece9f005e1a03c130eeea8b9b209953c0686aefe28df3e5ad2fftkcgf\" (UID: \"bd5d2600-fe6f-407b-8110-97516c3117cb\") " pod="openstack-operators/fcb715ece9f005e1a03c130eeea8b9b209953c0686aefe28df3e5ad2fftkcgf" Sep 30 12:33:45 crc kubenswrapper[5002]: I0930 12:33:45.784197 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qfnm8\" (UniqueName: \"kubernetes.io/projected/bd5d2600-fe6f-407b-8110-97516c3117cb-kube-api-access-qfnm8\") pod \"fcb715ece9f005e1a03c130eeea8b9b209953c0686aefe28df3e5ad2fftkcgf\" (UID: \"bd5d2600-fe6f-407b-8110-97516c3117cb\") " pod="openstack-operators/fcb715ece9f005e1a03c130eeea8b9b209953c0686aefe28df3e5ad2fftkcgf" Sep 30 12:33:45 crc kubenswrapper[5002]: I0930 12:33:45.808047 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/fcb715ece9f005e1a03c130eeea8b9b209953c0686aefe28df3e5ad2fftkcgf" Sep 30 12:33:46 crc kubenswrapper[5002]: I0930 12:33:46.165466 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-j7hhf" Sep 30 12:33:46 crc kubenswrapper[5002]: I0930 12:33:46.165589 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-j7hhf" Sep 30 12:33:46 crc kubenswrapper[5002]: I0930 12:33:46.212387 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/fcb715ece9f005e1a03c130eeea8b9b209953c0686aefe28df3e5ad2fftkcgf"] Sep 30 12:33:46 crc kubenswrapper[5002]: W0930 12:33:46.223182 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podbd5d2600_fe6f_407b_8110_97516c3117cb.slice/crio-d44734dabd5dfc3fb7c78db4a2e0f45db5d23b1fb08774ff90090ad2967b9860 WatchSource:0}: Error finding container d44734dabd5dfc3fb7c78db4a2e0f45db5d23b1fb08774ff90090ad2967b9860: Status 404 returned error can't find the container with id d44734dabd5dfc3fb7c78db4a2e0f45db5d23b1fb08774ff90090ad2967b9860 Sep 30 12:33:46 crc kubenswrapper[5002]: I0930 12:33:46.235620 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-j7hhf" Sep 30 12:33:46 crc kubenswrapper[5002]: I0930 12:33:46.506298 5002 generic.go:334] "Generic (PLEG): container finished" podID="bd5d2600-fe6f-407b-8110-97516c3117cb" containerID="ba70ff9f60094d1495b13a8e9077739972690ba323be6abd53276eba7603efb4" exitCode=0 Sep 30 12:33:46 crc kubenswrapper[5002]: I0930 12:33:46.506371 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/fcb715ece9f005e1a03c130eeea8b9b209953c0686aefe28df3e5ad2fftkcgf" event={"ID":"bd5d2600-fe6f-407b-8110-97516c3117cb","Type":"ContainerDied","Data":"ba70ff9f60094d1495b13a8e9077739972690ba323be6abd53276eba7603efb4"} Sep 30 12:33:46 crc kubenswrapper[5002]: I0930 12:33:46.506629 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/fcb715ece9f005e1a03c130eeea8b9b209953c0686aefe28df3e5ad2fftkcgf" event={"ID":"bd5d2600-fe6f-407b-8110-97516c3117cb","Type":"ContainerStarted","Data":"d44734dabd5dfc3fb7c78db4a2e0f45db5d23b1fb08774ff90090ad2967b9860"} Sep 30 12:33:46 crc kubenswrapper[5002]: I0930 12:33:46.546026 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-j7hhf" Sep 30 12:33:47 crc kubenswrapper[5002]: I0930 12:33:47.520086 5002 generic.go:334] "Generic (PLEG): container finished" podID="bd5d2600-fe6f-407b-8110-97516c3117cb" containerID="def1dbfa2c7f0eb0ecec5a9f4ae1fe2e29024a4de198c260e8794bcb26013f78" exitCode=0 Sep 30 12:33:47 crc kubenswrapper[5002]: I0930 12:33:47.520159 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/fcb715ece9f005e1a03c130eeea8b9b209953c0686aefe28df3e5ad2fftkcgf" event={"ID":"bd5d2600-fe6f-407b-8110-97516c3117cb","Type":"ContainerDied","Data":"def1dbfa2c7f0eb0ecec5a9f4ae1fe2e29024a4de198c260e8794bcb26013f78"} Sep 30 12:33:48 crc kubenswrapper[5002]: I0930 12:33:48.060610 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-2rz4g"] Sep 30 12:33:48 crc kubenswrapper[5002]: I0930 12:33:48.062339 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-2rz4g" Sep 30 12:33:48 crc kubenswrapper[5002]: I0930 12:33:48.077275 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-2rz4g"] Sep 30 12:33:48 crc kubenswrapper[5002]: I0930 12:33:48.192927 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f54da31a-6088-4cbc-bdef-51fe08af5365-utilities\") pod \"redhat-marketplace-2rz4g\" (UID: \"f54da31a-6088-4cbc-bdef-51fe08af5365\") " pod="openshift-marketplace/redhat-marketplace-2rz4g" Sep 30 12:33:48 crc kubenswrapper[5002]: I0930 12:33:48.192974 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f54da31a-6088-4cbc-bdef-51fe08af5365-catalog-content\") pod \"redhat-marketplace-2rz4g\" (UID: \"f54da31a-6088-4cbc-bdef-51fe08af5365\") " pod="openshift-marketplace/redhat-marketplace-2rz4g" Sep 30 12:33:48 crc kubenswrapper[5002]: I0930 12:33:48.193003 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pwsq7\" (UniqueName: \"kubernetes.io/projected/f54da31a-6088-4cbc-bdef-51fe08af5365-kube-api-access-pwsq7\") pod \"redhat-marketplace-2rz4g\" (UID: \"f54da31a-6088-4cbc-bdef-51fe08af5365\") " pod="openshift-marketplace/redhat-marketplace-2rz4g" Sep 30 12:33:48 crc kubenswrapper[5002]: I0930 12:33:48.294288 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f54da31a-6088-4cbc-bdef-51fe08af5365-utilities\") pod \"redhat-marketplace-2rz4g\" (UID: \"f54da31a-6088-4cbc-bdef-51fe08af5365\") " pod="openshift-marketplace/redhat-marketplace-2rz4g" Sep 30 12:33:48 crc kubenswrapper[5002]: I0930 12:33:48.294333 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f54da31a-6088-4cbc-bdef-51fe08af5365-catalog-content\") pod \"redhat-marketplace-2rz4g\" (UID: \"f54da31a-6088-4cbc-bdef-51fe08af5365\") " pod="openshift-marketplace/redhat-marketplace-2rz4g" Sep 30 12:33:48 crc kubenswrapper[5002]: I0930 12:33:48.294367 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pwsq7\" (UniqueName: \"kubernetes.io/projected/f54da31a-6088-4cbc-bdef-51fe08af5365-kube-api-access-pwsq7\") pod \"redhat-marketplace-2rz4g\" (UID: \"f54da31a-6088-4cbc-bdef-51fe08af5365\") " pod="openshift-marketplace/redhat-marketplace-2rz4g" Sep 30 12:33:48 crc kubenswrapper[5002]: I0930 12:33:48.294911 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f54da31a-6088-4cbc-bdef-51fe08af5365-utilities\") pod \"redhat-marketplace-2rz4g\" (UID: \"f54da31a-6088-4cbc-bdef-51fe08af5365\") " pod="openshift-marketplace/redhat-marketplace-2rz4g" Sep 30 12:33:48 crc kubenswrapper[5002]: I0930 12:33:48.294924 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f54da31a-6088-4cbc-bdef-51fe08af5365-catalog-content\") pod \"redhat-marketplace-2rz4g\" (UID: \"f54da31a-6088-4cbc-bdef-51fe08af5365\") " pod="openshift-marketplace/redhat-marketplace-2rz4g" Sep 30 12:33:48 crc kubenswrapper[5002]: I0930 12:33:48.314642 5002 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-pwsq7\" (UniqueName: \"kubernetes.io/projected/f54da31a-6088-4cbc-bdef-51fe08af5365-kube-api-access-pwsq7\") pod \"redhat-marketplace-2rz4g\" (UID: \"f54da31a-6088-4cbc-bdef-51fe08af5365\") " pod="openshift-marketplace/redhat-marketplace-2rz4g" Sep 30 12:33:48 crc kubenswrapper[5002]: I0930 12:33:48.403245 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-2rz4g" Sep 30 12:33:48 crc kubenswrapper[5002]: I0930 12:33:48.535795 5002 generic.go:334] "Generic (PLEG): container finished" podID="bd5d2600-fe6f-407b-8110-97516c3117cb" containerID="2100d126f4fad7521f2ea956ef4c7cad2e291c74cfb4177da1c0322c4b323410" exitCode=0 Sep 30 12:33:48 crc kubenswrapper[5002]: I0930 12:33:48.535882 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/fcb715ece9f005e1a03c130eeea8b9b209953c0686aefe28df3e5ad2fftkcgf" event={"ID":"bd5d2600-fe6f-407b-8110-97516c3117cb","Type":"ContainerDied","Data":"2100d126f4fad7521f2ea956ef4c7cad2e291c74cfb4177da1c0322c4b323410"} Sep 30 12:33:48 crc kubenswrapper[5002]: I0930 12:33:48.824849 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-2rz4g"] Sep 30 12:33:49 crc kubenswrapper[5002]: I0930 12:33:49.543228 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-2rz4g" event={"ID":"f54da31a-6088-4cbc-bdef-51fe08af5365","Type":"ContainerStarted","Data":"dd1bee02946e38b5637e974ad38a95b2e802a111caa544a22bf87b1e07c4a2cc"} Sep 30 12:33:49 crc kubenswrapper[5002]: I0930 12:33:49.543567 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-2rz4g" event={"ID":"f54da31a-6088-4cbc-bdef-51fe08af5365","Type":"ContainerStarted","Data":"32e59eb6d1680ef115e9e7a9a096a3cfe767d645f739d139daf7bcebd93cfc68"} Sep 30 12:33:49 crc kubenswrapper[5002]: I0930 12:33:49.840092 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/fcb715ece9f005e1a03c130eeea8b9b209953c0686aefe28df3e5ad2fftkcgf" Sep 30 12:33:50 crc kubenswrapper[5002]: I0930 12:33:50.015697 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/bd5d2600-fe6f-407b-8110-97516c3117cb-bundle\") pod \"bd5d2600-fe6f-407b-8110-97516c3117cb\" (UID: \"bd5d2600-fe6f-407b-8110-97516c3117cb\") " Sep 30 12:33:50 crc kubenswrapper[5002]: I0930 12:33:50.015794 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qfnm8\" (UniqueName: \"kubernetes.io/projected/bd5d2600-fe6f-407b-8110-97516c3117cb-kube-api-access-qfnm8\") pod \"bd5d2600-fe6f-407b-8110-97516c3117cb\" (UID: \"bd5d2600-fe6f-407b-8110-97516c3117cb\") " Sep 30 12:33:50 crc kubenswrapper[5002]: I0930 12:33:50.016557 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bd5d2600-fe6f-407b-8110-97516c3117cb-bundle" (OuterVolumeSpecName: "bundle") pod "bd5d2600-fe6f-407b-8110-97516c3117cb" (UID: "bd5d2600-fe6f-407b-8110-97516c3117cb"). InnerVolumeSpecName "bundle". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 12:33:50 crc kubenswrapper[5002]: I0930 12:33:50.016871 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/bd5d2600-fe6f-407b-8110-97516c3117cb-util\") pod \"bd5d2600-fe6f-407b-8110-97516c3117cb\" (UID: \"bd5d2600-fe6f-407b-8110-97516c3117cb\") " Sep 30 12:33:50 crc kubenswrapper[5002]: I0930 12:33:50.017220 5002 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/bd5d2600-fe6f-407b-8110-97516c3117cb-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 12:33:50 crc kubenswrapper[5002]: I0930 12:33:50.028730 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bd5d2600-fe6f-407b-8110-97516c3117cb-kube-api-access-qfnm8" (OuterVolumeSpecName: "kube-api-access-qfnm8") pod "bd5d2600-fe6f-407b-8110-97516c3117cb" (UID: "bd5d2600-fe6f-407b-8110-97516c3117cb"). InnerVolumeSpecName "kube-api-access-qfnm8". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 12:33:50 crc kubenswrapper[5002]: I0930 12:33:50.033749 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bd5d2600-fe6f-407b-8110-97516c3117cb-util" (OuterVolumeSpecName: "util") pod "bd5d2600-fe6f-407b-8110-97516c3117cb" (UID: "bd5d2600-fe6f-407b-8110-97516c3117cb"). InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 12:33:50 crc kubenswrapper[5002]: I0930 12:33:50.118717 5002 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/bd5d2600-fe6f-407b-8110-97516c3117cb-util\") on node \"crc\" DevicePath \"\"" Sep 30 12:33:50 crc kubenswrapper[5002]: I0930 12:33:50.118763 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qfnm8\" (UniqueName: \"kubernetes.io/projected/bd5d2600-fe6f-407b-8110-97516c3117cb-kube-api-access-qfnm8\") on node \"crc\" DevicePath \"\"" Sep 30 12:33:50 crc kubenswrapper[5002]: I0930 12:33:50.551995 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/fcb715ece9f005e1a03c130eeea8b9b209953c0686aefe28df3e5ad2fftkcgf" event={"ID":"bd5d2600-fe6f-407b-8110-97516c3117cb","Type":"ContainerDied","Data":"d44734dabd5dfc3fb7c78db4a2e0f45db5d23b1fb08774ff90090ad2967b9860"} Sep 30 12:33:50 crc kubenswrapper[5002]: I0930 12:33:50.552048 5002 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d44734dabd5dfc3fb7c78db4a2e0f45db5d23b1fb08774ff90090ad2967b9860" Sep 30 12:33:50 crc kubenswrapper[5002]: I0930 12:33:50.552018 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/fcb715ece9f005e1a03c130eeea8b9b209953c0686aefe28df3e5ad2fftkcgf" Sep 30 12:33:50 crc kubenswrapper[5002]: I0930 12:33:50.553549 5002 generic.go:334] "Generic (PLEG): container finished" podID="f54da31a-6088-4cbc-bdef-51fe08af5365" containerID="dd1bee02946e38b5637e974ad38a95b2e802a111caa544a22bf87b1e07c4a2cc" exitCode=0 Sep 30 12:33:50 crc kubenswrapper[5002]: I0930 12:33:50.553585 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-2rz4g" event={"ID":"f54da31a-6088-4cbc-bdef-51fe08af5365","Type":"ContainerDied","Data":"dd1bee02946e38b5637e974ad38a95b2e802a111caa544a22bf87b1e07c4a2cc"} Sep 30 12:33:51 crc kubenswrapper[5002]: I0930 12:33:51.635064 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-j7hhf"] Sep 30 12:33:51 crc kubenswrapper[5002]: I0930 12:33:51.635685 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-j7hhf" podUID="82bc8b10-9d74-440f-8e1a-57d5153d059a" containerName="registry-server" containerID="cri-o://59a8b31dae674dfe6c098a46a7dd4219e14aa61fcde4ec220e1b471fdcdda537" gracePeriod=2 Sep 30 12:33:52 crc kubenswrapper[5002]: I0930 12:33:52.060390 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-j7hhf" Sep 30 12:33:52 crc kubenswrapper[5002]: I0930 12:33:52.248205 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/82bc8b10-9d74-440f-8e1a-57d5153d059a-utilities\") pod \"82bc8b10-9d74-440f-8e1a-57d5153d059a\" (UID: \"82bc8b10-9d74-440f-8e1a-57d5153d059a\") " Sep 30 12:33:52 crc kubenswrapper[5002]: I0930 12:33:52.248344 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/82bc8b10-9d74-440f-8e1a-57d5153d059a-catalog-content\") pod \"82bc8b10-9d74-440f-8e1a-57d5153d059a\" (UID: \"82bc8b10-9d74-440f-8e1a-57d5153d059a\") " Sep 30 12:33:52 crc kubenswrapper[5002]: I0930 12:33:52.248453 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nwp8q\" (UniqueName: \"kubernetes.io/projected/82bc8b10-9d74-440f-8e1a-57d5153d059a-kube-api-access-nwp8q\") pod \"82bc8b10-9d74-440f-8e1a-57d5153d059a\" (UID: \"82bc8b10-9d74-440f-8e1a-57d5153d059a\") " Sep 30 12:33:52 crc kubenswrapper[5002]: I0930 12:33:52.249727 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/82bc8b10-9d74-440f-8e1a-57d5153d059a-utilities" (OuterVolumeSpecName: "utilities") pod "82bc8b10-9d74-440f-8e1a-57d5153d059a" (UID: "82bc8b10-9d74-440f-8e1a-57d5153d059a"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 12:33:52 crc kubenswrapper[5002]: I0930 12:33:52.261844 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/82bc8b10-9d74-440f-8e1a-57d5153d059a-kube-api-access-nwp8q" (OuterVolumeSpecName: "kube-api-access-nwp8q") pod "82bc8b10-9d74-440f-8e1a-57d5153d059a" (UID: "82bc8b10-9d74-440f-8e1a-57d5153d059a"). InnerVolumeSpecName "kube-api-access-nwp8q". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 12:33:52 crc kubenswrapper[5002]: I0930 12:33:52.302786 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/82bc8b10-9d74-440f-8e1a-57d5153d059a-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "82bc8b10-9d74-440f-8e1a-57d5153d059a" (UID: "82bc8b10-9d74-440f-8e1a-57d5153d059a"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 12:33:52 crc kubenswrapper[5002]: I0930 12:33:52.350241 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nwp8q\" (UniqueName: \"kubernetes.io/projected/82bc8b10-9d74-440f-8e1a-57d5153d059a-kube-api-access-nwp8q\") on node \"crc\" DevicePath \"\"" Sep 30 12:33:52 crc kubenswrapper[5002]: I0930 12:33:52.350275 5002 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/82bc8b10-9d74-440f-8e1a-57d5153d059a-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 12:33:52 crc kubenswrapper[5002]: I0930 12:33:52.350284 5002 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/82bc8b10-9d74-440f-8e1a-57d5153d059a-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 12:33:52 crc kubenswrapper[5002]: I0930 12:33:52.579878 5002 generic.go:334] "Generic (PLEG): container finished" podID="82bc8b10-9d74-440f-8e1a-57d5153d059a" containerID="59a8b31dae674dfe6c098a46a7dd4219e14aa61fcde4ec220e1b471fdcdda537" exitCode=0 Sep 30 12:33:52 crc kubenswrapper[5002]: I0930 12:33:52.579952 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-j7hhf" event={"ID":"82bc8b10-9d74-440f-8e1a-57d5153d059a","Type":"ContainerDied","Data":"59a8b31dae674dfe6c098a46a7dd4219e14aa61fcde4ec220e1b471fdcdda537"} Sep 30 12:33:52 crc kubenswrapper[5002]: I0930 12:33:52.580029 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-j7hhf" event={"ID":"82bc8b10-9d74-440f-8e1a-57d5153d059a","Type":"ContainerDied","Data":"6f7530c1e1ede63ce5f521f1b6532b4f3fd1ec377f9dfa9368c9bcfa9b06d304"} Sep 30 12:33:52 crc kubenswrapper[5002]: I0930 12:33:52.580025 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-j7hhf" Sep 30 12:33:52 crc kubenswrapper[5002]: I0930 12:33:52.580055 5002 scope.go:117] "RemoveContainer" containerID="59a8b31dae674dfe6c098a46a7dd4219e14aa61fcde4ec220e1b471fdcdda537" Sep 30 12:33:52 crc kubenswrapper[5002]: I0930 12:33:52.583365 5002 generic.go:334] "Generic (PLEG): container finished" podID="f54da31a-6088-4cbc-bdef-51fe08af5365" containerID="6de0735fd14288da53b70d994e80107cd042ea6a7090a2eba4db9b5586fecedb" exitCode=0 Sep 30 12:33:52 crc kubenswrapper[5002]: I0930 12:33:52.583427 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-2rz4g" event={"ID":"f54da31a-6088-4cbc-bdef-51fe08af5365","Type":"ContainerDied","Data":"6de0735fd14288da53b70d994e80107cd042ea6a7090a2eba4db9b5586fecedb"} Sep 30 12:33:52 crc kubenswrapper[5002]: I0930 12:33:52.621000 5002 scope.go:117] "RemoveContainer" containerID="0de6729627aee82c1abe9e935387d60c54e182bfdaefff82b5eebf68e735c838" Sep 30 12:33:52 crc kubenswrapper[5002]: I0930 12:33:52.645829 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-j7hhf"] Sep 30 12:33:52 crc kubenswrapper[5002]: I0930 12:33:52.651521 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-j7hhf"] Sep 30 12:33:52 crc kubenswrapper[5002]: I0930 12:33:52.659099 5002 scope.go:117] "RemoveContainer" containerID="6d111258697baaa574b42e1081557ee109f37e825eeb9dec787480e442b1ac3a" Sep 30 12:33:52 crc kubenswrapper[5002]: I0930 12:33:52.687839 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="82bc8b10-9d74-440f-8e1a-57d5153d059a" path="/var/lib/kubelet/pods/82bc8b10-9d74-440f-8e1a-57d5153d059a/volumes" Sep 30 12:33:52 crc kubenswrapper[5002]: I0930 12:33:52.690658 5002 scope.go:117] "RemoveContainer" containerID="59a8b31dae674dfe6c098a46a7dd4219e14aa61fcde4ec220e1b471fdcdda537" Sep 30 12:33:52 crc kubenswrapper[5002]: E0930 12:33:52.691566 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"59a8b31dae674dfe6c098a46a7dd4219e14aa61fcde4ec220e1b471fdcdda537\": container with ID starting with 59a8b31dae674dfe6c098a46a7dd4219e14aa61fcde4ec220e1b471fdcdda537 not found: ID does not exist" containerID="59a8b31dae674dfe6c098a46a7dd4219e14aa61fcde4ec220e1b471fdcdda537" Sep 30 12:33:52 crc kubenswrapper[5002]: I0930 12:33:52.691628 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"59a8b31dae674dfe6c098a46a7dd4219e14aa61fcde4ec220e1b471fdcdda537"} err="failed to get container status \"59a8b31dae674dfe6c098a46a7dd4219e14aa61fcde4ec220e1b471fdcdda537\": rpc error: code = NotFound desc = could not find container \"59a8b31dae674dfe6c098a46a7dd4219e14aa61fcde4ec220e1b471fdcdda537\": container with ID starting with 59a8b31dae674dfe6c098a46a7dd4219e14aa61fcde4ec220e1b471fdcdda537 not found: ID does not exist" Sep 30 12:33:52 crc kubenswrapper[5002]: I0930 12:33:52.691672 5002 scope.go:117] "RemoveContainer" containerID="0de6729627aee82c1abe9e935387d60c54e182bfdaefff82b5eebf68e735c838" Sep 30 12:33:52 crc kubenswrapper[5002]: E0930 12:33:52.692380 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0de6729627aee82c1abe9e935387d60c54e182bfdaefff82b5eebf68e735c838\": container with ID starting with 
0de6729627aee82c1abe9e935387d60c54e182bfdaefff82b5eebf68e735c838 not found: ID does not exist" containerID="0de6729627aee82c1abe9e935387d60c54e182bfdaefff82b5eebf68e735c838" Sep 30 12:33:52 crc kubenswrapper[5002]: I0930 12:33:52.692428 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0de6729627aee82c1abe9e935387d60c54e182bfdaefff82b5eebf68e735c838"} err="failed to get container status \"0de6729627aee82c1abe9e935387d60c54e182bfdaefff82b5eebf68e735c838\": rpc error: code = NotFound desc = could not find container \"0de6729627aee82c1abe9e935387d60c54e182bfdaefff82b5eebf68e735c838\": container with ID starting with 0de6729627aee82c1abe9e935387d60c54e182bfdaefff82b5eebf68e735c838 not found: ID does not exist" Sep 30 12:33:52 crc kubenswrapper[5002]: I0930 12:33:52.692491 5002 scope.go:117] "RemoveContainer" containerID="6d111258697baaa574b42e1081557ee109f37e825eeb9dec787480e442b1ac3a" Sep 30 12:33:52 crc kubenswrapper[5002]: E0930 12:33:52.692880 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6d111258697baaa574b42e1081557ee109f37e825eeb9dec787480e442b1ac3a\": container with ID starting with 6d111258697baaa574b42e1081557ee109f37e825eeb9dec787480e442b1ac3a not found: ID does not exist" containerID="6d111258697baaa574b42e1081557ee109f37e825eeb9dec787480e442b1ac3a" Sep 30 12:33:52 crc kubenswrapper[5002]: I0930 12:33:52.692905 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6d111258697baaa574b42e1081557ee109f37e825eeb9dec787480e442b1ac3a"} err="failed to get container status \"6d111258697baaa574b42e1081557ee109f37e825eeb9dec787480e442b1ac3a\": rpc error: code = NotFound desc = could not find container \"6d111258697baaa574b42e1081557ee109f37e825eeb9dec787480e442b1ac3a\": container with ID starting with 6d111258697baaa574b42e1081557ee109f37e825eeb9dec787480e442b1ac3a not found: ID does not exist" Sep 30 12:33:53 crc kubenswrapper[5002]: I0930 12:33:53.594782 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-2rz4g" event={"ID":"f54da31a-6088-4cbc-bdef-51fe08af5365","Type":"ContainerStarted","Data":"c3028b056989b8bcfb9d1a76a700e5ab614fba653de1eb8a8c40dfecda2dd588"} Sep 30 12:33:53 crc kubenswrapper[5002]: I0930 12:33:53.614035 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-2rz4g" podStartSLOduration=2.989330755 podStartE2EDuration="5.613989751s" podCreationTimestamp="2025-09-30 12:33:48 +0000 UTC" firstStartedPulling="2025-09-30 12:33:50.555095581 +0000 UTC m=+804.804777727" lastFinishedPulling="2025-09-30 12:33:53.179754537 +0000 UTC m=+807.429436723" observedRunningTime="2025-09-30 12:33:53.610668368 +0000 UTC m=+807.860350524" watchObservedRunningTime="2025-09-30 12:33:53.613989751 +0000 UTC m=+807.863671897" Sep 30 12:33:55 crc kubenswrapper[5002]: I0930 12:33:55.276030 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-controller-operator-b45798bf6-lpg2f"] Sep 30 12:33:55 crc kubenswrapper[5002]: E0930 12:33:55.276576 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bd5d2600-fe6f-407b-8110-97516c3117cb" containerName="extract" Sep 30 12:33:55 crc kubenswrapper[5002]: I0930 12:33:55.276592 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="bd5d2600-fe6f-407b-8110-97516c3117cb" containerName="extract" Sep 30 12:33:55 crc 
kubenswrapper[5002]: E0930 12:33:55.276601 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="82bc8b10-9d74-440f-8e1a-57d5153d059a" containerName="extract-content" Sep 30 12:33:55 crc kubenswrapper[5002]: I0930 12:33:55.276607 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="82bc8b10-9d74-440f-8e1a-57d5153d059a" containerName="extract-content" Sep 30 12:33:55 crc kubenswrapper[5002]: E0930 12:33:55.276614 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bd5d2600-fe6f-407b-8110-97516c3117cb" containerName="pull" Sep 30 12:33:55 crc kubenswrapper[5002]: I0930 12:33:55.276620 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="bd5d2600-fe6f-407b-8110-97516c3117cb" containerName="pull" Sep 30 12:33:55 crc kubenswrapper[5002]: E0930 12:33:55.276631 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bd5d2600-fe6f-407b-8110-97516c3117cb" containerName="util" Sep 30 12:33:55 crc kubenswrapper[5002]: I0930 12:33:55.276636 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="bd5d2600-fe6f-407b-8110-97516c3117cb" containerName="util" Sep 30 12:33:55 crc kubenswrapper[5002]: E0930 12:33:55.276648 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="82bc8b10-9d74-440f-8e1a-57d5153d059a" containerName="extract-utilities" Sep 30 12:33:55 crc kubenswrapper[5002]: I0930 12:33:55.276654 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="82bc8b10-9d74-440f-8e1a-57d5153d059a" containerName="extract-utilities" Sep 30 12:33:55 crc kubenswrapper[5002]: E0930 12:33:55.276662 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="82bc8b10-9d74-440f-8e1a-57d5153d059a" containerName="registry-server" Sep 30 12:33:55 crc kubenswrapper[5002]: I0930 12:33:55.276668 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="82bc8b10-9d74-440f-8e1a-57d5153d059a" containerName="registry-server" Sep 30 12:33:55 crc kubenswrapper[5002]: I0930 12:33:55.276764 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="bd5d2600-fe6f-407b-8110-97516c3117cb" containerName="extract" Sep 30 12:33:55 crc kubenswrapper[5002]: I0930 12:33:55.276776 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="82bc8b10-9d74-440f-8e1a-57d5153d059a" containerName="registry-server" Sep 30 12:33:55 crc kubenswrapper[5002]: I0930 12:33:55.277360 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-operator-b45798bf6-lpg2f" Sep 30 12:33:55 crc kubenswrapper[5002]: I0930 12:33:55.283037 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-operator-dockercfg-hcq28" Sep 30 12:33:55 crc kubenswrapper[5002]: I0930 12:33:55.317982 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-operator-b45798bf6-lpg2f"] Sep 30 12:33:55 crc kubenswrapper[5002]: I0930 12:33:55.389596 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9c5qx\" (UniqueName: \"kubernetes.io/projected/362e8e35-0b7c-4c4f-9db2-9c3d56a76d2f-kube-api-access-9c5qx\") pod \"openstack-operator-controller-operator-b45798bf6-lpg2f\" (UID: \"362e8e35-0b7c-4c4f-9db2-9c3d56a76d2f\") " pod="openstack-operators/openstack-operator-controller-operator-b45798bf6-lpg2f" Sep 30 12:33:55 crc kubenswrapper[5002]: I0930 12:33:55.491268 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9c5qx\" (UniqueName: \"kubernetes.io/projected/362e8e35-0b7c-4c4f-9db2-9c3d56a76d2f-kube-api-access-9c5qx\") pod \"openstack-operator-controller-operator-b45798bf6-lpg2f\" (UID: \"362e8e35-0b7c-4c4f-9db2-9c3d56a76d2f\") " pod="openstack-operators/openstack-operator-controller-operator-b45798bf6-lpg2f" Sep 30 12:33:55 crc kubenswrapper[5002]: I0930 12:33:55.514573 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9c5qx\" (UniqueName: \"kubernetes.io/projected/362e8e35-0b7c-4c4f-9db2-9c3d56a76d2f-kube-api-access-9c5qx\") pod \"openstack-operator-controller-operator-b45798bf6-lpg2f\" (UID: \"362e8e35-0b7c-4c4f-9db2-9c3d56a76d2f\") " pod="openstack-operators/openstack-operator-controller-operator-b45798bf6-lpg2f" Sep 30 12:33:55 crc kubenswrapper[5002]: I0930 12:33:55.595060 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-operator-b45798bf6-lpg2f" Sep 30 12:33:56 crc kubenswrapper[5002]: I0930 12:33:56.106451 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-operator-b45798bf6-lpg2f"] Sep 30 12:33:56 crc kubenswrapper[5002]: I0930 12:33:56.623400 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-b45798bf6-lpg2f" event={"ID":"362e8e35-0b7c-4c4f-9db2-9c3d56a76d2f","Type":"ContainerStarted","Data":"b13db5064b7a779aa553d45f8abba805292312b469625010e53e56417aa9716e"} Sep 30 12:33:58 crc kubenswrapper[5002]: I0930 12:33:58.403843 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-2rz4g" Sep 30 12:33:58 crc kubenswrapper[5002]: I0930 12:33:58.404192 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-2rz4g" Sep 30 12:33:58 crc kubenswrapper[5002]: I0930 12:33:58.461443 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-2rz4g" Sep 30 12:33:58 crc kubenswrapper[5002]: I0930 12:33:58.723586 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-2rz4g" Sep 30 12:34:00 crc kubenswrapper[5002]: I0930 12:34:00.652950 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-b45798bf6-lpg2f" event={"ID":"362e8e35-0b7c-4c4f-9db2-9c3d56a76d2f","Type":"ContainerStarted","Data":"a0b142603ed061b41529945ade113b63d8a017f1938015d535c9ea70f1876969"} Sep 30 12:34:01 crc kubenswrapper[5002]: I0930 12:34:01.833425 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-2rz4g"] Sep 30 12:34:01 crc kubenswrapper[5002]: I0930 12:34:01.833854 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-2rz4g" podUID="f54da31a-6088-4cbc-bdef-51fe08af5365" containerName="registry-server" containerID="cri-o://c3028b056989b8bcfb9d1a76a700e5ab614fba653de1eb8a8c40dfecda2dd588" gracePeriod=2 Sep 30 12:34:02 crc kubenswrapper[5002]: I0930 12:34:02.308054 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-2rz4g" Sep 30 12:34:02 crc kubenswrapper[5002]: I0930 12:34:02.395443 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f54da31a-6088-4cbc-bdef-51fe08af5365-utilities\") pod \"f54da31a-6088-4cbc-bdef-51fe08af5365\" (UID: \"f54da31a-6088-4cbc-bdef-51fe08af5365\") " Sep 30 12:34:02 crc kubenswrapper[5002]: I0930 12:34:02.395678 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f54da31a-6088-4cbc-bdef-51fe08af5365-catalog-content\") pod \"f54da31a-6088-4cbc-bdef-51fe08af5365\" (UID: \"f54da31a-6088-4cbc-bdef-51fe08af5365\") " Sep 30 12:34:02 crc kubenswrapper[5002]: I0930 12:34:02.395716 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pwsq7\" (UniqueName: \"kubernetes.io/projected/f54da31a-6088-4cbc-bdef-51fe08af5365-kube-api-access-pwsq7\") pod \"f54da31a-6088-4cbc-bdef-51fe08af5365\" (UID: \"f54da31a-6088-4cbc-bdef-51fe08af5365\") " Sep 30 12:34:02 crc kubenswrapper[5002]: I0930 12:34:02.396245 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f54da31a-6088-4cbc-bdef-51fe08af5365-utilities" (OuterVolumeSpecName: "utilities") pod "f54da31a-6088-4cbc-bdef-51fe08af5365" (UID: "f54da31a-6088-4cbc-bdef-51fe08af5365"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 12:34:02 crc kubenswrapper[5002]: I0930 12:34:02.401645 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f54da31a-6088-4cbc-bdef-51fe08af5365-kube-api-access-pwsq7" (OuterVolumeSpecName: "kube-api-access-pwsq7") pod "f54da31a-6088-4cbc-bdef-51fe08af5365" (UID: "f54da31a-6088-4cbc-bdef-51fe08af5365"). InnerVolumeSpecName "kube-api-access-pwsq7". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 12:34:02 crc kubenswrapper[5002]: I0930 12:34:02.412519 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f54da31a-6088-4cbc-bdef-51fe08af5365-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "f54da31a-6088-4cbc-bdef-51fe08af5365" (UID: "f54da31a-6088-4cbc-bdef-51fe08af5365"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 12:34:02 crc kubenswrapper[5002]: I0930 12:34:02.497143 5002 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f54da31a-6088-4cbc-bdef-51fe08af5365-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 12:34:02 crc kubenswrapper[5002]: I0930 12:34:02.497184 5002 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f54da31a-6088-4cbc-bdef-51fe08af5365-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 12:34:02 crc kubenswrapper[5002]: I0930 12:34:02.497198 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pwsq7\" (UniqueName: \"kubernetes.io/projected/f54da31a-6088-4cbc-bdef-51fe08af5365-kube-api-access-pwsq7\") on node \"crc\" DevicePath \"\"" Sep 30 12:34:02 crc kubenswrapper[5002]: I0930 12:34:02.668813 5002 generic.go:334] "Generic (PLEG): container finished" podID="f54da31a-6088-4cbc-bdef-51fe08af5365" containerID="c3028b056989b8bcfb9d1a76a700e5ab614fba653de1eb8a8c40dfecda2dd588" exitCode=0 Sep 30 12:34:02 crc kubenswrapper[5002]: I0930 12:34:02.668878 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-2rz4g" event={"ID":"f54da31a-6088-4cbc-bdef-51fe08af5365","Type":"ContainerDied","Data":"c3028b056989b8bcfb9d1a76a700e5ab614fba653de1eb8a8c40dfecda2dd588"} Sep 30 12:34:02 crc kubenswrapper[5002]: I0930 12:34:02.668908 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-2rz4g" event={"ID":"f54da31a-6088-4cbc-bdef-51fe08af5365","Type":"ContainerDied","Data":"32e59eb6d1680ef115e9e7a9a096a3cfe767d645f739d139daf7bcebd93cfc68"} Sep 30 12:34:02 crc kubenswrapper[5002]: I0930 12:34:02.668930 5002 scope.go:117] "RemoveContainer" containerID="c3028b056989b8bcfb9d1a76a700e5ab614fba653de1eb8a8c40dfecda2dd588" Sep 30 12:34:02 crc kubenswrapper[5002]: I0930 12:34:02.669047 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-2rz4g" Sep 30 12:34:02 crc kubenswrapper[5002]: I0930 12:34:02.674011 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-b45798bf6-lpg2f" event={"ID":"362e8e35-0b7c-4c4f-9db2-9c3d56a76d2f","Type":"ContainerStarted","Data":"7b49b836deb0f00be6e423b0a0675f916c28a1c55e4b19794954dd2dcceb425b"} Sep 30 12:34:02 crc kubenswrapper[5002]: I0930 12:34:02.674188 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-operator-b45798bf6-lpg2f" Sep 30 12:34:02 crc kubenswrapper[5002]: I0930 12:34:02.691726 5002 scope.go:117] "RemoveContainer" containerID="6de0735fd14288da53b70d994e80107cd042ea6a7090a2eba4db9b5586fecedb" Sep 30 12:34:02 crc kubenswrapper[5002]: I0930 12:34:02.715945 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-controller-operator-b45798bf6-lpg2f" podStartSLOduration=1.7578720049999998 podStartE2EDuration="7.715931549s" podCreationTimestamp="2025-09-30 12:33:55 +0000 UTC" firstStartedPulling="2025-09-30 12:33:56.117392923 +0000 UTC m=+810.367075069" lastFinishedPulling="2025-09-30 12:34:02.075452457 +0000 UTC m=+816.325134613" observedRunningTime="2025-09-30 12:34:02.709495431 +0000 UTC m=+816.959177617" watchObservedRunningTime="2025-09-30 12:34:02.715931549 +0000 UTC m=+816.965613695" Sep 30 12:34:02 crc kubenswrapper[5002]: I0930 12:34:02.724331 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-2rz4g"] Sep 30 12:34:02 crc kubenswrapper[5002]: I0930 12:34:02.726166 5002 scope.go:117] "RemoveContainer" containerID="dd1bee02946e38b5637e974ad38a95b2e802a111caa544a22bf87b1e07c4a2cc" Sep 30 12:34:02 crc kubenswrapper[5002]: I0930 12:34:02.728234 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-2rz4g"] Sep 30 12:34:02 crc kubenswrapper[5002]: I0930 12:34:02.743377 5002 scope.go:117] "RemoveContainer" containerID="c3028b056989b8bcfb9d1a76a700e5ab614fba653de1eb8a8c40dfecda2dd588" Sep 30 12:34:02 crc kubenswrapper[5002]: E0930 12:34:02.743856 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c3028b056989b8bcfb9d1a76a700e5ab614fba653de1eb8a8c40dfecda2dd588\": container with ID starting with c3028b056989b8bcfb9d1a76a700e5ab614fba653de1eb8a8c40dfecda2dd588 not found: ID does not exist" containerID="c3028b056989b8bcfb9d1a76a700e5ab614fba653de1eb8a8c40dfecda2dd588" Sep 30 12:34:02 crc kubenswrapper[5002]: I0930 12:34:02.743902 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c3028b056989b8bcfb9d1a76a700e5ab614fba653de1eb8a8c40dfecda2dd588"} err="failed to get container status \"c3028b056989b8bcfb9d1a76a700e5ab614fba653de1eb8a8c40dfecda2dd588\": rpc error: code = NotFound desc = could not find container \"c3028b056989b8bcfb9d1a76a700e5ab614fba653de1eb8a8c40dfecda2dd588\": container with ID starting with c3028b056989b8bcfb9d1a76a700e5ab614fba653de1eb8a8c40dfecda2dd588 not found: ID does not exist" Sep 30 12:34:02 crc kubenswrapper[5002]: I0930 12:34:02.743928 5002 scope.go:117] "RemoveContainer" containerID="6de0735fd14288da53b70d994e80107cd042ea6a7090a2eba4db9b5586fecedb" Sep 30 12:34:02 crc kubenswrapper[5002]: E0930 12:34:02.744228 5002 log.go:32] "ContainerStatus from runtime service failed" 
err="rpc error: code = NotFound desc = could not find container \"6de0735fd14288da53b70d994e80107cd042ea6a7090a2eba4db9b5586fecedb\": container with ID starting with 6de0735fd14288da53b70d994e80107cd042ea6a7090a2eba4db9b5586fecedb not found: ID does not exist" containerID="6de0735fd14288da53b70d994e80107cd042ea6a7090a2eba4db9b5586fecedb" Sep 30 12:34:02 crc kubenswrapper[5002]: I0930 12:34:02.744263 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6de0735fd14288da53b70d994e80107cd042ea6a7090a2eba4db9b5586fecedb"} err="failed to get container status \"6de0735fd14288da53b70d994e80107cd042ea6a7090a2eba4db9b5586fecedb\": rpc error: code = NotFound desc = could not find container \"6de0735fd14288da53b70d994e80107cd042ea6a7090a2eba4db9b5586fecedb\": container with ID starting with 6de0735fd14288da53b70d994e80107cd042ea6a7090a2eba4db9b5586fecedb not found: ID does not exist" Sep 30 12:34:02 crc kubenswrapper[5002]: I0930 12:34:02.744285 5002 scope.go:117] "RemoveContainer" containerID="dd1bee02946e38b5637e974ad38a95b2e802a111caa544a22bf87b1e07c4a2cc" Sep 30 12:34:02 crc kubenswrapper[5002]: E0930 12:34:02.744646 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"dd1bee02946e38b5637e974ad38a95b2e802a111caa544a22bf87b1e07c4a2cc\": container with ID starting with dd1bee02946e38b5637e974ad38a95b2e802a111caa544a22bf87b1e07c4a2cc not found: ID does not exist" containerID="dd1bee02946e38b5637e974ad38a95b2e802a111caa544a22bf87b1e07c4a2cc" Sep 30 12:34:02 crc kubenswrapper[5002]: I0930 12:34:02.744681 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dd1bee02946e38b5637e974ad38a95b2e802a111caa544a22bf87b1e07c4a2cc"} err="failed to get container status \"dd1bee02946e38b5637e974ad38a95b2e802a111caa544a22bf87b1e07c4a2cc\": rpc error: code = NotFound desc = could not find container \"dd1bee02946e38b5637e974ad38a95b2e802a111caa544a22bf87b1e07c4a2cc\": container with ID starting with dd1bee02946e38b5637e974ad38a95b2e802a111caa544a22bf87b1e07c4a2cc not found: ID does not exist" Sep 30 12:34:04 crc kubenswrapper[5002]: I0930 12:34:04.689229 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f54da31a-6088-4cbc-bdef-51fe08af5365" path="/var/lib/kubelet/pods/f54da31a-6088-4cbc-bdef-51fe08af5365/volumes" Sep 30 12:34:05 crc kubenswrapper[5002]: I0930 12:34:05.598411 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-controller-operator-b45798bf6-lpg2f" Sep 30 12:34:22 crc kubenswrapper[5002]: I0930 12:34:22.848734 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/barbican-operator-controller-manager-6ff8b75857-mf4jp"] Sep 30 12:34:22 crc kubenswrapper[5002]: E0930 12:34:22.849460 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f54da31a-6088-4cbc-bdef-51fe08af5365" containerName="extract-utilities" Sep 30 12:34:22 crc kubenswrapper[5002]: I0930 12:34:22.849491 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="f54da31a-6088-4cbc-bdef-51fe08af5365" containerName="extract-utilities" Sep 30 12:34:22 crc kubenswrapper[5002]: E0930 12:34:22.849502 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f54da31a-6088-4cbc-bdef-51fe08af5365" containerName="registry-server" Sep 30 12:34:22 crc kubenswrapper[5002]: I0930 12:34:22.849508 5002 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="f54da31a-6088-4cbc-bdef-51fe08af5365" containerName="registry-server" Sep 30 12:34:22 crc kubenswrapper[5002]: E0930 12:34:22.849522 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f54da31a-6088-4cbc-bdef-51fe08af5365" containerName="extract-content" Sep 30 12:34:22 crc kubenswrapper[5002]: I0930 12:34:22.849528 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="f54da31a-6088-4cbc-bdef-51fe08af5365" containerName="extract-content" Sep 30 12:34:22 crc kubenswrapper[5002]: I0930 12:34:22.849624 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="f54da31a-6088-4cbc-bdef-51fe08af5365" containerName="registry-server" Sep 30 12:34:22 crc kubenswrapper[5002]: I0930 12:34:22.850186 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-6ff8b75857-mf4jp" Sep 30 12:34:22 crc kubenswrapper[5002]: I0930 12:34:22.856790 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"barbican-operator-controller-manager-dockercfg-dhkx2" Sep 30 12:34:22 crc kubenswrapper[5002]: I0930 12:34:22.872259 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/cinder-operator-controller-manager-644bddb6d8-ph95z"] Sep 30 12:34:22 crc kubenswrapper[5002]: I0930 12:34:22.873255 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/cinder-operator-controller-manager-644bddb6d8-ph95z" Sep 30 12:34:22 crc kubenswrapper[5002]: I0930 12:34:22.879575 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"cinder-operator-controller-manager-dockercfg-5njbq" Sep 30 12:34:22 crc kubenswrapper[5002]: I0930 12:34:22.882321 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-6ff8b75857-mf4jp"] Sep 30 12:34:22 crc kubenswrapper[5002]: I0930 12:34:22.887361 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/designate-operator-controller-manager-84f4f7b77b-rvv9g"] Sep 30 12:34:22 crc kubenswrapper[5002]: I0930 12:34:22.888728 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/designate-operator-controller-manager-84f4f7b77b-rvv9g" Sep 30 12:34:22 crc kubenswrapper[5002]: I0930 12:34:22.896047 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"designate-operator-controller-manager-dockercfg-6v47q" Sep 30 12:34:22 crc kubenswrapper[5002]: I0930 12:34:22.907916 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/cinder-operator-controller-manager-644bddb6d8-ph95z"] Sep 30 12:34:22 crc kubenswrapper[5002]: I0930 12:34:22.917544 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/designate-operator-controller-manager-84f4f7b77b-rvv9g"] Sep 30 12:34:22 crc kubenswrapper[5002]: I0930 12:34:22.969738 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/glance-operator-controller-manager-84958c4d49-chbd7"] Sep 30 12:34:22 crc kubenswrapper[5002]: I0930 12:34:22.971014 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-84958c4d49-chbd7" Sep 30 12:34:22 crc kubenswrapper[5002]: I0930 12:34:22.972352 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tbs6c\" (UniqueName: \"kubernetes.io/projected/636ed6c4-281d-4ea2-be99-a04e07b08170-kube-api-access-tbs6c\") pod \"barbican-operator-controller-manager-6ff8b75857-mf4jp\" (UID: \"636ed6c4-281d-4ea2-be99-a04e07b08170\") " pod="openstack-operators/barbican-operator-controller-manager-6ff8b75857-mf4jp" Sep 30 12:34:22 crc kubenswrapper[5002]: I0930 12:34:22.972390 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dxbvn\" (UniqueName: \"kubernetes.io/projected/8eefd962-2b74-4b77-8bc9-338b8ccfd0cf-kube-api-access-dxbvn\") pod \"cinder-operator-controller-manager-644bddb6d8-ph95z\" (UID: \"8eefd962-2b74-4b77-8bc9-338b8ccfd0cf\") " pod="openstack-operators/cinder-operator-controller-manager-644bddb6d8-ph95z" Sep 30 12:34:22 crc kubenswrapper[5002]: I0930 12:34:22.972454 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vshc6\" (UniqueName: \"kubernetes.io/projected/9caa184d-b4ff-4419-8f8d-ede2b0b6845e-kube-api-access-vshc6\") pod \"designate-operator-controller-manager-84f4f7b77b-rvv9g\" (UID: \"9caa184d-b4ff-4419-8f8d-ede2b0b6845e\") " pod="openstack-operators/designate-operator-controller-manager-84f4f7b77b-rvv9g" Sep 30 12:34:22 crc kubenswrapper[5002]: I0930 12:34:22.973503 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"glance-operator-controller-manager-dockercfg-9455j" Sep 30 12:34:22 crc kubenswrapper[5002]: I0930 12:34:22.996048 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-84958c4d49-chbd7"] Sep 30 12:34:23 crc kubenswrapper[5002]: I0930 12:34:23.020725 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/heat-operator-controller-manager-5d889d78cf-sjnlc"] Sep 30 12:34:23 crc kubenswrapper[5002]: I0930 12:34:23.021958 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/heat-operator-controller-manager-5d889d78cf-sjnlc" Sep 30 12:34:23 crc kubenswrapper[5002]: I0930 12:34:23.027534 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/horizon-operator-controller-manager-9f4696d94-42j7m"] Sep 30 12:34:23 crc kubenswrapper[5002]: I0930 12:34:23.028727 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-9f4696d94-42j7m" Sep 30 12:34:23 crc kubenswrapper[5002]: I0930 12:34:23.031112 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"heat-operator-controller-manager-dockercfg-s6hnn" Sep 30 12:34:23 crc kubenswrapper[5002]: I0930 12:34:23.031929 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"horizon-operator-controller-manager-dockercfg-8d89n" Sep 30 12:34:23 crc kubenswrapper[5002]: I0930 12:34:23.039697 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/heat-operator-controller-manager-5d889d78cf-sjnlc"] Sep 30 12:34:23 crc kubenswrapper[5002]: I0930 12:34:23.059083 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-9f4696d94-42j7m"] Sep 30 12:34:23 crc kubenswrapper[5002]: I0930 12:34:23.064706 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/infra-operator-controller-manager-7d857cc749-qcq76"] Sep 30 12:34:23 crc kubenswrapper[5002]: I0930 12:34:23.065885 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-7d857cc749-qcq76" Sep 30 12:34:23 crc kubenswrapper[5002]: I0930 12:34:23.068582 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-controller-manager-dockercfg-mfrk8" Sep 30 12:34:23 crc kubenswrapper[5002]: I0930 12:34:23.068750 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-webhook-server-cert" Sep 30 12:34:23 crc kubenswrapper[5002]: I0930 12:34:23.073760 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tbs6c\" (UniqueName: \"kubernetes.io/projected/636ed6c4-281d-4ea2-be99-a04e07b08170-kube-api-access-tbs6c\") pod \"barbican-operator-controller-manager-6ff8b75857-mf4jp\" (UID: \"636ed6c4-281d-4ea2-be99-a04e07b08170\") " pod="openstack-operators/barbican-operator-controller-manager-6ff8b75857-mf4jp" Sep 30 12:34:23 crc kubenswrapper[5002]: I0930 12:34:23.073820 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dxbvn\" (UniqueName: \"kubernetes.io/projected/8eefd962-2b74-4b77-8bc9-338b8ccfd0cf-kube-api-access-dxbvn\") pod \"cinder-operator-controller-manager-644bddb6d8-ph95z\" (UID: \"8eefd962-2b74-4b77-8bc9-338b8ccfd0cf\") " pod="openstack-operators/cinder-operator-controller-manager-644bddb6d8-ph95z" Sep 30 12:34:23 crc kubenswrapper[5002]: I0930 12:34:23.073880 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vshc6\" (UniqueName: \"kubernetes.io/projected/9caa184d-b4ff-4419-8f8d-ede2b0b6845e-kube-api-access-vshc6\") pod \"designate-operator-controller-manager-84f4f7b77b-rvv9g\" (UID: \"9caa184d-b4ff-4419-8f8d-ede2b0b6845e\") " pod="openstack-operators/designate-operator-controller-manager-84f4f7b77b-rvv9g" Sep 30 12:34:23 crc kubenswrapper[5002]: I0930 12:34:23.073903 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g4rrj\" (UniqueName: \"kubernetes.io/projected/d44fe72c-afa7-4442-b308-0b111e16c7b8-kube-api-access-g4rrj\") pod \"glance-operator-controller-manager-84958c4d49-chbd7\" (UID: \"d44fe72c-afa7-4442-b308-0b111e16c7b8\") " 
pod="openstack-operators/glance-operator-controller-manager-84958c4d49-chbd7" Sep 30 12:34:23 crc kubenswrapper[5002]: I0930 12:34:23.091308 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ironic-operator-controller-manager-7975b88857-r7t4w"] Sep 30 12:34:23 crc kubenswrapper[5002]: I0930 12:34:23.099583 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-7d857cc749-qcq76"] Sep 30 12:34:23 crc kubenswrapper[5002]: I0930 12:34:23.099733 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ironic-operator-controller-manager-7975b88857-r7t4w" Sep 30 12:34:23 crc kubenswrapper[5002]: I0930 12:34:23.107304 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ironic-operator-controller-manager-7975b88857-r7t4w"] Sep 30 12:34:23 crc kubenswrapper[5002]: I0930 12:34:23.113785 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"ironic-operator-controller-manager-dockercfg-zwhrb" Sep 30 12:34:23 crc kubenswrapper[5002]: I0930 12:34:23.115585 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/keystone-operator-controller-manager-5bd55b4bff-m7m4h"] Sep 30 12:34:23 crc kubenswrapper[5002]: I0930 12:34:23.117625 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-5bd55b4bff-m7m4h" Sep 30 12:34:23 crc kubenswrapper[5002]: I0930 12:34:23.119993 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dxbvn\" (UniqueName: \"kubernetes.io/projected/8eefd962-2b74-4b77-8bc9-338b8ccfd0cf-kube-api-access-dxbvn\") pod \"cinder-operator-controller-manager-644bddb6d8-ph95z\" (UID: \"8eefd962-2b74-4b77-8bc9-338b8ccfd0cf\") " pod="openstack-operators/cinder-operator-controller-manager-644bddb6d8-ph95z" Sep 30 12:34:23 crc kubenswrapper[5002]: I0930 12:34:23.126637 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"keystone-operator-controller-manager-dockercfg-gk987" Sep 30 12:34:23 crc kubenswrapper[5002]: I0930 12:34:23.127022 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tbs6c\" (UniqueName: \"kubernetes.io/projected/636ed6c4-281d-4ea2-be99-a04e07b08170-kube-api-access-tbs6c\") pod \"barbican-operator-controller-manager-6ff8b75857-mf4jp\" (UID: \"636ed6c4-281d-4ea2-be99-a04e07b08170\") " pod="openstack-operators/barbican-operator-controller-manager-6ff8b75857-mf4jp" Sep 30 12:34:23 crc kubenswrapper[5002]: I0930 12:34:23.128609 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-5bd55b4bff-m7m4h"] Sep 30 12:34:23 crc kubenswrapper[5002]: I0930 12:34:23.141827 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vshc6\" (UniqueName: \"kubernetes.io/projected/9caa184d-b4ff-4419-8f8d-ede2b0b6845e-kube-api-access-vshc6\") pod \"designate-operator-controller-manager-84f4f7b77b-rvv9g\" (UID: \"9caa184d-b4ff-4419-8f8d-ede2b0b6845e\") " pod="openstack-operators/designate-operator-controller-manager-84f4f7b77b-rvv9g" Sep 30 12:34:23 crc kubenswrapper[5002]: I0930 12:34:23.145566 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/manila-operator-controller-manager-6d68dbc695-zzt9s"] Sep 30 12:34:23 crc kubenswrapper[5002]: I0930 12:34:23.146797 5002 util.go:30] "No 
sandbox for pod can be found. Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-6d68dbc695-zzt9s" Sep 30 12:34:23 crc kubenswrapper[5002]: I0930 12:34:23.150325 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"manila-operator-controller-manager-dockercfg-2w7qx" Sep 30 12:34:23 crc kubenswrapper[5002]: I0930 12:34:23.159646 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-88c7-sxv7p"] Sep 30 12:34:23 crc kubenswrapper[5002]: I0930 12:34:23.160681 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-88c7-sxv7p" Sep 30 12:34:23 crc kubenswrapper[5002]: I0930 12:34:23.173630 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/neutron-operator-controller-manager-64d7b59854-hcr66"] Sep 30 12:34:23 crc kubenswrapper[5002]: I0930 12:34:23.173973 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-6ff8b75857-mf4jp" Sep 30 12:34:23 crc kubenswrapper[5002]: I0930 12:34:23.174707 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/neutron-operator-controller-manager-64d7b59854-hcr66" Sep 30 12:34:23 crc kubenswrapper[5002]: I0930 12:34:23.174923 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g4rrj\" (UniqueName: \"kubernetes.io/projected/d44fe72c-afa7-4442-b308-0b111e16c7b8-kube-api-access-g4rrj\") pod \"glance-operator-controller-manager-84958c4d49-chbd7\" (UID: \"d44fe72c-afa7-4442-b308-0b111e16c7b8\") " pod="openstack-operators/glance-operator-controller-manager-84958c4d49-chbd7" Sep 30 12:34:23 crc kubenswrapper[5002]: I0930 12:34:23.174948 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wht9p\" (UniqueName: \"kubernetes.io/projected/720e0316-9060-4bd3-804c-f98017a3fb84-kube-api-access-wht9p\") pod \"ironic-operator-controller-manager-7975b88857-r7t4w\" (UID: \"720e0316-9060-4bd3-804c-f98017a3fb84\") " pod="openstack-operators/ironic-operator-controller-manager-7975b88857-r7t4w" Sep 30 12:34:23 crc kubenswrapper[5002]: I0930 12:34:23.174973 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6d6qb\" (UniqueName: \"kubernetes.io/projected/7c75d96a-d0a7-4f12-9799-4d01ee215248-kube-api-access-6d6qb\") pod \"infra-operator-controller-manager-7d857cc749-qcq76\" (UID: \"7c75d96a-d0a7-4f12-9799-4d01ee215248\") " pod="openstack-operators/infra-operator-controller-manager-7d857cc749-qcq76" Sep 30 12:34:23 crc kubenswrapper[5002]: I0930 12:34:23.174991 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/7c75d96a-d0a7-4f12-9799-4d01ee215248-cert\") pod \"infra-operator-controller-manager-7d857cc749-qcq76\" (UID: \"7c75d96a-d0a7-4f12-9799-4d01ee215248\") " pod="openstack-operators/infra-operator-controller-manager-7d857cc749-qcq76" Sep 30 12:34:23 crc kubenswrapper[5002]: I0930 12:34:23.175024 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fs68m\" (UniqueName: \"kubernetes.io/projected/63ee8874-1cbb-4183-b16a-f2efd8a1e7d6-kube-api-access-fs68m\") pod 
\"heat-operator-controller-manager-5d889d78cf-sjnlc\" (UID: \"63ee8874-1cbb-4183-b16a-f2efd8a1e7d6\") " pod="openstack-operators/heat-operator-controller-manager-5d889d78cf-sjnlc" Sep 30 12:34:23 crc kubenswrapper[5002]: I0930 12:34:23.175067 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6m7jf\" (UniqueName: \"kubernetes.io/projected/aa7bbe9e-668f-4ee3-976d-8e9ddbdbb092-kube-api-access-6m7jf\") pod \"horizon-operator-controller-manager-9f4696d94-42j7m\" (UID: \"aa7bbe9e-668f-4ee3-976d-8e9ddbdbb092\") " pod="openstack-operators/horizon-operator-controller-manager-9f4696d94-42j7m" Sep 30 12:34:23 crc kubenswrapper[5002]: I0930 12:34:23.178366 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"mariadb-operator-controller-manager-dockercfg-9qzjl" Sep 30 12:34:23 crc kubenswrapper[5002]: I0930 12:34:23.178628 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"neutron-operator-controller-manager-dockercfg-c8bkf" Sep 30 12:34:23 crc kubenswrapper[5002]: I0930 12:34:23.183331 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-controller-manager-6d68dbc695-zzt9s"] Sep 30 12:34:23 crc kubenswrapper[5002]: I0930 12:34:23.187892 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/neutron-operator-controller-manager-64d7b59854-hcr66"] Sep 30 12:34:23 crc kubenswrapper[5002]: I0930 12:34:23.191978 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/cinder-operator-controller-manager-644bddb6d8-ph95z" Sep 30 12:34:23 crc kubenswrapper[5002]: I0930 12:34:23.198055 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g4rrj\" (UniqueName: \"kubernetes.io/projected/d44fe72c-afa7-4442-b308-0b111e16c7b8-kube-api-access-g4rrj\") pod \"glance-operator-controller-manager-84958c4d49-chbd7\" (UID: \"d44fe72c-afa7-4442-b308-0b111e16c7b8\") " pod="openstack-operators/glance-operator-controller-manager-84958c4d49-chbd7" Sep 30 12:34:23 crc kubenswrapper[5002]: I0930 12:34:23.201635 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-88c7-sxv7p"] Sep 30 12:34:23 crc kubenswrapper[5002]: I0930 12:34:23.216766 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/designate-operator-controller-manager-84f4f7b77b-rvv9g" Sep 30 12:34:23 crc kubenswrapper[5002]: I0930 12:34:23.229542 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/nova-operator-controller-manager-c7c776c96-gh66v"] Sep 30 12:34:23 crc kubenswrapper[5002]: I0930 12:34:23.230548 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-c7c776c96-gh66v" Sep 30 12:34:23 crc kubenswrapper[5002]: I0930 12:34:23.235895 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/octavia-operator-controller-manager-76fcc6dc7c-xwlmz"] Sep 30 12:34:23 crc kubenswrapper[5002]: I0930 12:34:23.236895 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/octavia-operator-controller-manager-76fcc6dc7c-xwlmz" Sep 30 12:34:23 crc kubenswrapper[5002]: I0930 12:34:23.238250 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"nova-operator-controller-manager-dockercfg-zwbgx" Sep 30 12:34:23 crc kubenswrapper[5002]: I0930 12:34:23.239536 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"octavia-operator-controller-manager-dockercfg-vwhxj" Sep 30 12:34:23 crc kubenswrapper[5002]: I0930 12:34:23.271956 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-c7c776c96-gh66v"] Sep 30 12:34:23 crc kubenswrapper[5002]: I0930 12:34:23.278156 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/octavia-operator-controller-manager-76fcc6dc7c-xwlmz"] Sep 30 12:34:23 crc kubenswrapper[5002]: I0930 12:34:23.278913 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fs68m\" (UniqueName: \"kubernetes.io/projected/63ee8874-1cbb-4183-b16a-f2efd8a1e7d6-kube-api-access-fs68m\") pod \"heat-operator-controller-manager-5d889d78cf-sjnlc\" (UID: \"63ee8874-1cbb-4183-b16a-f2efd8a1e7d6\") " pod="openstack-operators/heat-operator-controller-manager-5d889d78cf-sjnlc" Sep 30 12:34:23 crc kubenswrapper[5002]: I0930 12:34:23.278953 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nxznm\" (UniqueName: \"kubernetes.io/projected/9692c8b9-1e89-47e3-972c-1af7eb8a2ebe-kube-api-access-nxznm\") pod \"keystone-operator-controller-manager-5bd55b4bff-m7m4h\" (UID: \"9692c8b9-1e89-47e3-972c-1af7eb8a2ebe\") " pod="openstack-operators/keystone-operator-controller-manager-5bd55b4bff-m7m4h" Sep 30 12:34:23 crc kubenswrapper[5002]: I0930 12:34:23.278981 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8tmdc\" (UniqueName: \"kubernetes.io/projected/aa376f72-5b9a-4087-8ea6-a5cf80be315b-kube-api-access-8tmdc\") pod \"mariadb-operator-controller-manager-88c7-sxv7p\" (UID: \"aa376f72-5b9a-4087-8ea6-a5cf80be315b\") " pod="openstack-operators/mariadb-operator-controller-manager-88c7-sxv7p" Sep 30 12:34:23 crc kubenswrapper[5002]: I0930 12:34:23.279010 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6m7jf\" (UniqueName: \"kubernetes.io/projected/aa7bbe9e-668f-4ee3-976d-8e9ddbdbb092-kube-api-access-6m7jf\") pod \"horizon-operator-controller-manager-9f4696d94-42j7m\" (UID: \"aa7bbe9e-668f-4ee3-976d-8e9ddbdbb092\") " pod="openstack-operators/horizon-operator-controller-manager-9f4696d94-42j7m" Sep 30 12:34:23 crc kubenswrapper[5002]: I0930 12:34:23.279030 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zpct8\" (UniqueName: \"kubernetes.io/projected/855868b4-991b-4f9f-b471-5b1244221192-kube-api-access-zpct8\") pod \"manila-operator-controller-manager-6d68dbc695-zzt9s\" (UID: \"855868b4-991b-4f9f-b471-5b1244221192\") " pod="openstack-operators/manila-operator-controller-manager-6d68dbc695-zzt9s" Sep 30 12:34:23 crc kubenswrapper[5002]: I0930 12:34:23.279056 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wh6bb\" (UniqueName: \"kubernetes.io/projected/6413b1d0-7f0f-4bca-88e9-90a9d78bff9c-kube-api-access-wh6bb\") pod 
\"neutron-operator-controller-manager-64d7b59854-hcr66\" (UID: \"6413b1d0-7f0f-4bca-88e9-90a9d78bff9c\") " pod="openstack-operators/neutron-operator-controller-manager-64d7b59854-hcr66" Sep 30 12:34:23 crc kubenswrapper[5002]: I0930 12:34:23.279085 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wht9p\" (UniqueName: \"kubernetes.io/projected/720e0316-9060-4bd3-804c-f98017a3fb84-kube-api-access-wht9p\") pod \"ironic-operator-controller-manager-7975b88857-r7t4w\" (UID: \"720e0316-9060-4bd3-804c-f98017a3fb84\") " pod="openstack-operators/ironic-operator-controller-manager-7975b88857-r7t4w" Sep 30 12:34:23 crc kubenswrapper[5002]: I0930 12:34:23.279109 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6d6qb\" (UniqueName: \"kubernetes.io/projected/7c75d96a-d0a7-4f12-9799-4d01ee215248-kube-api-access-6d6qb\") pod \"infra-operator-controller-manager-7d857cc749-qcq76\" (UID: \"7c75d96a-d0a7-4f12-9799-4d01ee215248\") " pod="openstack-operators/infra-operator-controller-manager-7d857cc749-qcq76" Sep 30 12:34:23 crc kubenswrapper[5002]: I0930 12:34:23.279131 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/7c75d96a-d0a7-4f12-9799-4d01ee215248-cert\") pod \"infra-operator-controller-manager-7d857cc749-qcq76\" (UID: \"7c75d96a-d0a7-4f12-9799-4d01ee215248\") " pod="openstack-operators/infra-operator-controller-manager-7d857cc749-qcq76" Sep 30 12:34:23 crc kubenswrapper[5002]: E0930 12:34:23.279254 5002 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Sep 30 12:34:23 crc kubenswrapper[5002]: E0930 12:34:23.279302 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/7c75d96a-d0a7-4f12-9799-4d01ee215248-cert podName:7c75d96a-d0a7-4f12-9799-4d01ee215248 nodeName:}" failed. No retries permitted until 2025-09-30 12:34:23.779284645 +0000 UTC m=+838.028966791 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/7c75d96a-d0a7-4f12-9799-4d01ee215248-cert") pod "infra-operator-controller-manager-7d857cc749-qcq76" (UID: "7c75d96a-d0a7-4f12-9799-4d01ee215248") : secret "infra-operator-webhook-server-cert" not found Sep 30 12:34:23 crc kubenswrapper[5002]: I0930 12:34:23.299243 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6m7jf\" (UniqueName: \"kubernetes.io/projected/aa7bbe9e-668f-4ee3-976d-8e9ddbdbb092-kube-api-access-6m7jf\") pod \"horizon-operator-controller-manager-9f4696d94-42j7m\" (UID: \"aa7bbe9e-668f-4ee3-976d-8e9ddbdbb092\") " pod="openstack-operators/horizon-operator-controller-manager-9f4696d94-42j7m" Sep 30 12:34:23 crc kubenswrapper[5002]: I0930 12:34:23.299606 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wht9p\" (UniqueName: \"kubernetes.io/projected/720e0316-9060-4bd3-804c-f98017a3fb84-kube-api-access-wht9p\") pod \"ironic-operator-controller-manager-7975b88857-r7t4w\" (UID: \"720e0316-9060-4bd3-804c-f98017a3fb84\") " pod="openstack-operators/ironic-operator-controller-manager-7975b88857-r7t4w" Sep 30 12:34:23 crc kubenswrapper[5002]: I0930 12:34:23.299981 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6d6qb\" (UniqueName: \"kubernetes.io/projected/7c75d96a-d0a7-4f12-9799-4d01ee215248-kube-api-access-6d6qb\") pod \"infra-operator-controller-manager-7d857cc749-qcq76\" (UID: \"7c75d96a-d0a7-4f12-9799-4d01ee215248\") " pod="openstack-operators/infra-operator-controller-manager-7d857cc749-qcq76" Sep 30 12:34:23 crc kubenswrapper[5002]: I0930 12:34:23.300025 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-c7j6g"] Sep 30 12:34:23 crc kubenswrapper[5002]: I0930 12:34:23.301305 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-c7j6g" Sep 30 12:34:23 crc kubenswrapper[5002]: I0930 12:34:23.301750 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fs68m\" (UniqueName: \"kubernetes.io/projected/63ee8874-1cbb-4183-b16a-f2efd8a1e7d6-kube-api-access-fs68m\") pod \"heat-operator-controller-manager-5d889d78cf-sjnlc\" (UID: \"63ee8874-1cbb-4183-b16a-f2efd8a1e7d6\") " pod="openstack-operators/heat-operator-controller-manager-5d889d78cf-sjnlc" Sep 30 12:34:23 crc kubenswrapper[5002]: I0930 12:34:23.304427 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-webhook-server-cert" Sep 30 12:34:23 crc kubenswrapper[5002]: I0930 12:34:23.304449 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-controller-manager-dockercfg-p2n47" Sep 30 12:34:23 crc kubenswrapper[5002]: I0930 12:34:23.305084 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ovn-operator-controller-manager-9976ff44c-kl4qt"] Sep 30 12:34:23 crc kubenswrapper[5002]: I0930 12:34:23.306708 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ovn-operator-controller-manager-9976ff44c-kl4qt" Sep 30 12:34:23 crc kubenswrapper[5002]: I0930 12:34:23.307896 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-84958c4d49-chbd7" Sep 30 12:34:23 crc kubenswrapper[5002]: I0930 12:34:23.309171 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"ovn-operator-controller-manager-dockercfg-hstdc" Sep 30 12:34:23 crc kubenswrapper[5002]: I0930 12:34:23.309296 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/placement-operator-controller-manager-589c58c6c-57btn"] Sep 30 12:34:23 crc kubenswrapper[5002]: I0930 12:34:23.310615 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/placement-operator-controller-manager-589c58c6c-57btn" Sep 30 12:34:23 crc kubenswrapper[5002]: I0930 12:34:23.312076 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"placement-operator-controller-manager-dockercfg-nmcb9" Sep 30 12:34:23 crc kubenswrapper[5002]: I0930 12:34:23.313237 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-c7j6g"] Sep 30 12:34:23 crc kubenswrapper[5002]: I0930 12:34:23.322523 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ovn-operator-controller-manager-9976ff44c-kl4qt"] Sep 30 12:34:23 crc kubenswrapper[5002]: I0930 12:34:23.373529 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/heat-operator-controller-manager-5d889d78cf-sjnlc" Sep 30 12:34:23 crc kubenswrapper[5002]: I0930 12:34:23.386529 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/placement-operator-controller-manager-589c58c6c-57btn"] Sep 30 12:34:23 crc kubenswrapper[5002]: I0930 12:34:23.390828 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-9f4696d94-42j7m" Sep 30 12:34:23 crc kubenswrapper[5002]: I0930 12:34:23.399683 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c7ffh\" (UniqueName: \"kubernetes.io/projected/2607efd2-d90d-4a1e-be6c-5f3c88da67e4-kube-api-access-c7ffh\") pod \"ovn-operator-controller-manager-9976ff44c-kl4qt\" (UID: \"2607efd2-d90d-4a1e-be6c-5f3c88da67e4\") " pod="openstack-operators/ovn-operator-controller-manager-9976ff44c-kl4qt" Sep 30 12:34:23 crc kubenswrapper[5002]: I0930 12:34:23.399736 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vhgnh\" (UniqueName: \"kubernetes.io/projected/0d7297eb-3633-4083-9d4b-3bf8487360ca-kube-api-access-vhgnh\") pod \"placement-operator-controller-manager-589c58c6c-57btn\" (UID: \"0d7297eb-3633-4083-9d4b-3bf8487360ca\") " pod="openstack-operators/placement-operator-controller-manager-589c58c6c-57btn" Sep 30 12:34:23 crc kubenswrapper[5002]: I0930 12:34:23.399943 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gsn76\" (UniqueName: \"kubernetes.io/projected/38eab40a-26bf-4c1b-8911-4d6672629e3e-kube-api-access-gsn76\") pod \"octavia-operator-controller-manager-76fcc6dc7c-xwlmz\" (UID: \"38eab40a-26bf-4c1b-8911-4d6672629e3e\") " pod="openstack-operators/octavia-operator-controller-manager-76fcc6dc7c-xwlmz" Sep 30 12:34:23 crc kubenswrapper[5002]: I0930 12:34:23.400001 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nxznm\" (UniqueName: \"kubernetes.io/projected/9692c8b9-1e89-47e3-972c-1af7eb8a2ebe-kube-api-access-nxznm\") pod \"keystone-operator-controller-manager-5bd55b4bff-m7m4h\" (UID: \"9692c8b9-1e89-47e3-972c-1af7eb8a2ebe\") " pod="openstack-operators/keystone-operator-controller-manager-5bd55b4bff-m7m4h" Sep 30 12:34:23 crc kubenswrapper[5002]: I0930 12:34:23.400050 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8tmdc\" (UniqueName: \"kubernetes.io/projected/aa376f72-5b9a-4087-8ea6-a5cf80be315b-kube-api-access-8tmdc\") pod \"mariadb-operator-controller-manager-88c7-sxv7p\" (UID: \"aa376f72-5b9a-4087-8ea6-a5cf80be315b\") " pod="openstack-operators/mariadb-operator-controller-manager-88c7-sxv7p" Sep 30 12:34:23 crc kubenswrapper[5002]: I0930 12:34:23.400237 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zpct8\" (UniqueName: \"kubernetes.io/projected/855868b4-991b-4f9f-b471-5b1244221192-kube-api-access-zpct8\") pod \"manila-operator-controller-manager-6d68dbc695-zzt9s\" (UID: \"855868b4-991b-4f9f-b471-5b1244221192\") " pod="openstack-operators/manila-operator-controller-manager-6d68dbc695-zzt9s" Sep 30 12:34:23 crc kubenswrapper[5002]: I0930 12:34:23.400295 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wh6bb\" (UniqueName: \"kubernetes.io/projected/6413b1d0-7f0f-4bca-88e9-90a9d78bff9c-kube-api-access-wh6bb\") pod \"neutron-operator-controller-manager-64d7b59854-hcr66\" (UID: \"6413b1d0-7f0f-4bca-88e9-90a9d78bff9c\") " pod="openstack-operators/neutron-operator-controller-manager-64d7b59854-hcr66" Sep 30 12:34:23 crc kubenswrapper[5002]: I0930 12:34:23.400319 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"kube-api-access-sxx72\" (UniqueName: \"kubernetes.io/projected/5d79f7da-8bfd-4f26-bcf1-b4ad36a6b42f-kube-api-access-sxx72\") pod \"openstack-baremetal-operator-controller-manager-6d776955-c7j6g\" (UID: \"5d79f7da-8bfd-4f26-bcf1-b4ad36a6b42f\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-c7j6g" Sep 30 12:34:23 crc kubenswrapper[5002]: I0930 12:34:23.406676 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zqc7c\" (UniqueName: \"kubernetes.io/projected/2d9e02f5-4644-423a-a783-8dbc51d68570-kube-api-access-zqc7c\") pod \"nova-operator-controller-manager-c7c776c96-gh66v\" (UID: \"2d9e02f5-4644-423a-a783-8dbc51d68570\") " pod="openstack-operators/nova-operator-controller-manager-c7c776c96-gh66v" Sep 30 12:34:23 crc kubenswrapper[5002]: I0930 12:34:23.406782 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/5d79f7da-8bfd-4f26-bcf1-b4ad36a6b42f-cert\") pod \"openstack-baremetal-operator-controller-manager-6d776955-c7j6g\" (UID: \"5d79f7da-8bfd-4f26-bcf1-b4ad36a6b42f\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-c7j6g" Sep 30 12:34:23 crc kubenswrapper[5002]: I0930 12:34:23.451183 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ironic-operator-controller-manager-7975b88857-r7t4w" Sep 30 12:34:23 crc kubenswrapper[5002]: I0930 12:34:23.452662 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zpct8\" (UniqueName: \"kubernetes.io/projected/855868b4-991b-4f9f-b471-5b1244221192-kube-api-access-zpct8\") pod \"manila-operator-controller-manager-6d68dbc695-zzt9s\" (UID: \"855868b4-991b-4f9f-b471-5b1244221192\") " pod="openstack-operators/manila-operator-controller-manager-6d68dbc695-zzt9s" Sep 30 12:34:23 crc kubenswrapper[5002]: I0930 12:34:23.457444 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nxznm\" (UniqueName: \"kubernetes.io/projected/9692c8b9-1e89-47e3-972c-1af7eb8a2ebe-kube-api-access-nxznm\") pod \"keystone-operator-controller-manager-5bd55b4bff-m7m4h\" (UID: \"9692c8b9-1e89-47e3-972c-1af7eb8a2ebe\") " pod="openstack-operators/keystone-operator-controller-manager-5bd55b4bff-m7m4h" Sep 30 12:34:23 crc kubenswrapper[5002]: I0930 12:34:23.465209 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wh6bb\" (UniqueName: \"kubernetes.io/projected/6413b1d0-7f0f-4bca-88e9-90a9d78bff9c-kube-api-access-wh6bb\") pod \"neutron-operator-controller-manager-64d7b59854-hcr66\" (UID: \"6413b1d0-7f0f-4bca-88e9-90a9d78bff9c\") " pod="openstack-operators/neutron-operator-controller-manager-64d7b59854-hcr66" Sep 30 12:34:23 crc kubenswrapper[5002]: I0930 12:34:23.465434 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8tmdc\" (UniqueName: \"kubernetes.io/projected/aa376f72-5b9a-4087-8ea6-a5cf80be315b-kube-api-access-8tmdc\") pod \"mariadb-operator-controller-manager-88c7-sxv7p\" (UID: \"aa376f72-5b9a-4087-8ea6-a5cf80be315b\") " pod="openstack-operators/mariadb-operator-controller-manager-88c7-sxv7p" Sep 30 12:34:23 crc kubenswrapper[5002]: I0930 12:34:23.475695 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/swift-operator-controller-manager-bc7dc7bd9-d9jtf"] Sep 30 12:34:23 crc kubenswrapper[5002]: I0930 12:34:23.477317 5002 
util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-bc7dc7bd9-d9jtf" Sep 30 12:34:23 crc kubenswrapper[5002]: I0930 12:34:23.480148 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"swift-operator-controller-manager-dockercfg-bp6bq" Sep 30 12:34:23 crc kubenswrapper[5002]: I0930 12:34:23.481339 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-b8d54b5d7-7662l"] Sep 30 12:34:23 crc kubenswrapper[5002]: I0930 12:34:23.483259 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/telemetry-operator-controller-manager-b8d54b5d7-7662l" Sep 30 12:34:23 crc kubenswrapper[5002]: I0930 12:34:23.484740 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"telemetry-operator-controller-manager-dockercfg-lw2w7" Sep 30 12:34:23 crc kubenswrapper[5002]: I0930 12:34:23.506767 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-bc7dc7bd9-d9jtf"] Sep 30 12:34:23 crc kubenswrapper[5002]: I0930 12:34:23.509839 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gsn76\" (UniqueName: \"kubernetes.io/projected/38eab40a-26bf-4c1b-8911-4d6672629e3e-kube-api-access-gsn76\") pod \"octavia-operator-controller-manager-76fcc6dc7c-xwlmz\" (UID: \"38eab40a-26bf-4c1b-8911-4d6672629e3e\") " pod="openstack-operators/octavia-operator-controller-manager-76fcc6dc7c-xwlmz" Sep 30 12:34:23 crc kubenswrapper[5002]: I0930 12:34:23.509938 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sxx72\" (UniqueName: \"kubernetes.io/projected/5d79f7da-8bfd-4f26-bcf1-b4ad36a6b42f-kube-api-access-sxx72\") pod \"openstack-baremetal-operator-controller-manager-6d776955-c7j6g\" (UID: \"5d79f7da-8bfd-4f26-bcf1-b4ad36a6b42f\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-c7j6g" Sep 30 12:34:23 crc kubenswrapper[5002]: I0930 12:34:23.509997 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zqc7c\" (UniqueName: \"kubernetes.io/projected/2d9e02f5-4644-423a-a783-8dbc51d68570-kube-api-access-zqc7c\") pod \"nova-operator-controller-manager-c7c776c96-gh66v\" (UID: \"2d9e02f5-4644-423a-a783-8dbc51d68570\") " pod="openstack-operators/nova-operator-controller-manager-c7c776c96-gh66v" Sep 30 12:34:23 crc kubenswrapper[5002]: I0930 12:34:23.510068 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/5d79f7da-8bfd-4f26-bcf1-b4ad36a6b42f-cert\") pod \"openstack-baremetal-operator-controller-manager-6d776955-c7j6g\" (UID: \"5d79f7da-8bfd-4f26-bcf1-b4ad36a6b42f\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-c7j6g" Sep 30 12:34:23 crc kubenswrapper[5002]: I0930 12:34:23.510137 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c7ffh\" (UniqueName: \"kubernetes.io/projected/2607efd2-d90d-4a1e-be6c-5f3c88da67e4-kube-api-access-c7ffh\") pod \"ovn-operator-controller-manager-9976ff44c-kl4qt\" (UID: \"2607efd2-d90d-4a1e-be6c-5f3c88da67e4\") " pod="openstack-operators/ovn-operator-controller-manager-9976ff44c-kl4qt" Sep 30 12:34:23 crc kubenswrapper[5002]: I0930 12:34:23.510153 5002 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-vhgnh\" (UniqueName: \"kubernetes.io/projected/0d7297eb-3633-4083-9d4b-3bf8487360ca-kube-api-access-vhgnh\") pod \"placement-operator-controller-manager-589c58c6c-57btn\" (UID: \"0d7297eb-3633-4083-9d4b-3bf8487360ca\") " pod="openstack-operators/placement-operator-controller-manager-589c58c6c-57btn" Sep 30 12:34:23 crc kubenswrapper[5002]: E0930 12:34:23.510890 5002 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Sep 30 12:34:23 crc kubenswrapper[5002]: E0930 12:34:23.510962 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5d79f7da-8bfd-4f26-bcf1-b4ad36a6b42f-cert podName:5d79f7da-8bfd-4f26-bcf1-b4ad36a6b42f nodeName:}" failed. No retries permitted until 2025-09-30 12:34:24.010945771 +0000 UTC m=+838.260627907 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/5d79f7da-8bfd-4f26-bcf1-b4ad36a6b42f-cert") pod "openstack-baremetal-operator-controller-manager-6d776955-c7j6g" (UID: "5d79f7da-8bfd-4f26-bcf1-b4ad36a6b42f") : secret "openstack-baremetal-operator-webhook-server-cert" not found Sep 30 12:34:23 crc kubenswrapper[5002]: I0930 12:34:23.520162 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-b8d54b5d7-7662l"] Sep 30 12:34:23 crc kubenswrapper[5002]: I0930 12:34:23.522956 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-5bd55b4bff-m7m4h" Sep 30 12:34:23 crc kubenswrapper[5002]: I0930 12:34:23.533087 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sxx72\" (UniqueName: \"kubernetes.io/projected/5d79f7da-8bfd-4f26-bcf1-b4ad36a6b42f-kube-api-access-sxx72\") pod \"openstack-baremetal-operator-controller-manager-6d776955-c7j6g\" (UID: \"5d79f7da-8bfd-4f26-bcf1-b4ad36a6b42f\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-c7j6g" Sep 30 12:34:23 crc kubenswrapper[5002]: I0930 12:34:23.535825 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c7ffh\" (UniqueName: \"kubernetes.io/projected/2607efd2-d90d-4a1e-be6c-5f3c88da67e4-kube-api-access-c7ffh\") pod \"ovn-operator-controller-manager-9976ff44c-kl4qt\" (UID: \"2607efd2-d90d-4a1e-be6c-5f3c88da67e4\") " pod="openstack-operators/ovn-operator-controller-manager-9976ff44c-kl4qt" Sep 30 12:34:23 crc kubenswrapper[5002]: I0930 12:34:23.536670 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zqc7c\" (UniqueName: \"kubernetes.io/projected/2d9e02f5-4644-423a-a783-8dbc51d68570-kube-api-access-zqc7c\") pod \"nova-operator-controller-manager-c7c776c96-gh66v\" (UID: \"2d9e02f5-4644-423a-a783-8dbc51d68570\") " pod="openstack-operators/nova-operator-controller-manager-c7c776c96-gh66v" Sep 30 12:34:23 crc kubenswrapper[5002]: I0930 12:34:23.537085 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vhgnh\" (UniqueName: \"kubernetes.io/projected/0d7297eb-3633-4083-9d4b-3bf8487360ca-kube-api-access-vhgnh\") pod \"placement-operator-controller-manager-589c58c6c-57btn\" (UID: \"0d7297eb-3633-4083-9d4b-3bf8487360ca\") " pod="openstack-operators/placement-operator-controller-manager-589c58c6c-57btn" Sep 30 12:34:23 crc 
kubenswrapper[5002]: I0930 12:34:23.537675 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gsn76\" (UniqueName: \"kubernetes.io/projected/38eab40a-26bf-4c1b-8911-4d6672629e3e-kube-api-access-gsn76\") pod \"octavia-operator-controller-manager-76fcc6dc7c-xwlmz\" (UID: \"38eab40a-26bf-4c1b-8911-4d6672629e3e\") " pod="openstack-operators/octavia-operator-controller-manager-76fcc6dc7c-xwlmz" Sep 30 12:34:23 crc kubenswrapper[5002]: I0930 12:34:23.549534 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/placement-operator-controller-manager-589c58c6c-57btn" Sep 30 12:34:23 crc kubenswrapper[5002]: I0930 12:34:23.606541 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/test-operator-controller-manager-f66b554c6-z2dcj"] Sep 30 12:34:23 crc kubenswrapper[5002]: I0930 12:34:23.607661 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/test-operator-controller-manager-f66b554c6-z2dcj" Sep 30 12:34:23 crc kubenswrapper[5002]: I0930 12:34:23.610366 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-6d68dbc695-zzt9s" Sep 30 12:34:23 crc kubenswrapper[5002]: I0930 12:34:23.611124 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"test-operator-controller-manager-dockercfg-f4wsk" Sep 30 12:34:23 crc kubenswrapper[5002]: I0930 12:34:23.611317 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sjr96\" (UniqueName: \"kubernetes.io/projected/90cc681e-c24d-4b64-862d-3514308e77c6-kube-api-access-sjr96\") pod \"swift-operator-controller-manager-bc7dc7bd9-d9jtf\" (UID: \"90cc681e-c24d-4b64-862d-3514308e77c6\") " pod="openstack-operators/swift-operator-controller-manager-bc7dc7bd9-d9jtf" Sep 30 12:34:23 crc kubenswrapper[5002]: I0930 12:34:23.611435 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s2zj8\" (UniqueName: \"kubernetes.io/projected/78f98b71-27e2-411c-b610-8b4be1068d5a-kube-api-access-s2zj8\") pod \"telemetry-operator-controller-manager-b8d54b5d7-7662l\" (UID: \"78f98b71-27e2-411c-b610-8b4be1068d5a\") " pod="openstack-operators/telemetry-operator-controller-manager-b8d54b5d7-7662l" Sep 30 12:34:23 crc kubenswrapper[5002]: I0930 12:34:23.630424 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-f66b554c6-z2dcj"] Sep 30 12:34:23 crc kubenswrapper[5002]: I0930 12:34:23.662628 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/watcher-operator-controller-manager-76669f99c-n44h7"] Sep 30 12:34:23 crc kubenswrapper[5002]: I0930 12:34:23.663765 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-76669f99c-n44h7" Sep 30 12:34:23 crc kubenswrapper[5002]: I0930 12:34:23.669798 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"watcher-operator-controller-manager-dockercfg-d2shq" Sep 30 12:34:23 crc kubenswrapper[5002]: I0930 12:34:23.687804 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-88c7-sxv7p" Sep 30 12:34:23 crc kubenswrapper[5002]: I0930 12:34:23.689001 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/neutron-operator-controller-manager-64d7b59854-hcr66" Sep 30 12:34:23 crc kubenswrapper[5002]: I0930 12:34:23.692096 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-76669f99c-n44h7"] Sep 30 12:34:23 crc kubenswrapper[5002]: I0930 12:34:23.708320 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-c7c776c96-gh66v" Sep 30 12:34:23 crc kubenswrapper[5002]: I0930 12:34:23.712724 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9bnsx\" (UniqueName: \"kubernetes.io/projected/0fa1c573-cbf9-43f1-8106-7cf73e93f1f3-kube-api-access-9bnsx\") pod \"test-operator-controller-manager-f66b554c6-z2dcj\" (UID: \"0fa1c573-cbf9-43f1-8106-7cf73e93f1f3\") " pod="openstack-operators/test-operator-controller-manager-f66b554c6-z2dcj" Sep 30 12:34:23 crc kubenswrapper[5002]: I0930 12:34:23.719240 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2zj8\" (UniqueName: \"kubernetes.io/projected/78f98b71-27e2-411c-b610-8b4be1068d5a-kube-api-access-s2zj8\") pod \"telemetry-operator-controller-manager-b8d54b5d7-7662l\" (UID: \"78f98b71-27e2-411c-b610-8b4be1068d5a\") " pod="openstack-operators/telemetry-operator-controller-manager-b8d54b5d7-7662l" Sep 30 12:34:23 crc kubenswrapper[5002]: I0930 12:34:23.719392 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sjr96\" (UniqueName: \"kubernetes.io/projected/90cc681e-c24d-4b64-862d-3514308e77c6-kube-api-access-sjr96\") pod \"swift-operator-controller-manager-bc7dc7bd9-d9jtf\" (UID: \"90cc681e-c24d-4b64-862d-3514308e77c6\") " pod="openstack-operators/swift-operator-controller-manager-bc7dc7bd9-d9jtf" Sep 30 12:34:23 crc kubenswrapper[5002]: I0930 12:34:23.724977 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-controller-manager-84697dfb4d-w86tm"] Sep 30 12:34:23 crc kubenswrapper[5002]: I0930 12:34:23.736025 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-manager-84697dfb4d-w86tm"] Sep 30 12:34:23 crc kubenswrapper[5002]: I0930 12:34:23.736261 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-manager-84697dfb4d-w86tm" Sep 30 12:34:23 crc kubenswrapper[5002]: I0930 12:34:23.742011 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sjr96\" (UniqueName: \"kubernetes.io/projected/90cc681e-c24d-4b64-862d-3514308e77c6-kube-api-access-sjr96\") pod \"swift-operator-controller-manager-bc7dc7bd9-d9jtf\" (UID: \"90cc681e-c24d-4b64-862d-3514308e77c6\") " pod="openstack-operators/swift-operator-controller-manager-bc7dc7bd9-d9jtf" Sep 30 12:34:23 crc kubenswrapper[5002]: I0930 12:34:23.742332 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"webhook-server-cert" Sep 30 12:34:23 crc kubenswrapper[5002]: I0930 12:34:23.742522 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-manager-dockercfg-r4jgw" Sep 30 12:34:23 crc kubenswrapper[5002]: I0930 12:34:23.752493 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-79d8469568-h9dzh"] Sep 30 12:34:23 crc kubenswrapper[5002]: I0930 12:34:23.753407 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-manager-79d8469568-h9dzh" Sep 30 12:34:23 crc kubenswrapper[5002]: I0930 12:34:23.760547 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/octavia-operator-controller-manager-76fcc6dc7c-xwlmz" Sep 30 12:34:23 crc kubenswrapper[5002]: I0930 12:34:23.760646 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2zj8\" (UniqueName: \"kubernetes.io/projected/78f98b71-27e2-411c-b610-8b4be1068d5a-kube-api-access-s2zj8\") pod \"telemetry-operator-controller-manager-b8d54b5d7-7662l\" (UID: \"78f98b71-27e2-411c-b610-8b4be1068d5a\") " pod="openstack-operators/telemetry-operator-controller-manager-b8d54b5d7-7662l" Sep 30 12:34:23 crc kubenswrapper[5002]: I0930 12:34:23.761575 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"rabbitmq-cluster-operator-controller-manager-dockercfg-gvr4m" Sep 30 12:34:23 crc kubenswrapper[5002]: I0930 12:34:23.763336 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-79d8469568-h9dzh"] Sep 30 12:34:23 crc kubenswrapper[5002]: I0930 12:34:23.787331 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/cinder-operator-controller-manager-644bddb6d8-ph95z"] Sep 30 12:34:23 crc kubenswrapper[5002]: I0930 12:34:23.820363 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bhsbm\" (UniqueName: \"kubernetes.io/projected/b4044bb7-6e63-45d0-9640-7966bcd23aa9-kube-api-access-bhsbm\") pod \"rabbitmq-cluster-operator-manager-79d8469568-h9dzh\" (UID: \"b4044bb7-6e63-45d0-9640-7966bcd23aa9\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-79d8469568-h9dzh" Sep 30 12:34:23 crc kubenswrapper[5002]: I0930 12:34:23.820686 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xfwjm\" (UniqueName: \"kubernetes.io/projected/e5254af8-649d-413f-b146-51c982f48073-kube-api-access-xfwjm\") pod \"watcher-operator-controller-manager-76669f99c-n44h7\" (UID: \"e5254af8-649d-413f-b146-51c982f48073\") " 
pod="openstack-operators/watcher-operator-controller-manager-76669f99c-n44h7" Sep 30 12:34:23 crc kubenswrapper[5002]: I0930 12:34:23.820755 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q28mr\" (UniqueName: \"kubernetes.io/projected/32ebad0d-e677-4e33-b555-44db1541becc-kube-api-access-q28mr\") pod \"openstack-operator-controller-manager-84697dfb4d-w86tm\" (UID: \"32ebad0d-e677-4e33-b555-44db1541becc\") " pod="openstack-operators/openstack-operator-controller-manager-84697dfb4d-w86tm" Sep 30 12:34:23 crc kubenswrapper[5002]: I0930 12:34:23.820808 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9bnsx\" (UniqueName: \"kubernetes.io/projected/0fa1c573-cbf9-43f1-8106-7cf73e93f1f3-kube-api-access-9bnsx\") pod \"test-operator-controller-manager-f66b554c6-z2dcj\" (UID: \"0fa1c573-cbf9-43f1-8106-7cf73e93f1f3\") " pod="openstack-operators/test-operator-controller-manager-f66b554c6-z2dcj" Sep 30 12:34:23 crc kubenswrapper[5002]: I0930 12:34:23.820865 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/7c75d96a-d0a7-4f12-9799-4d01ee215248-cert\") pod \"infra-operator-controller-manager-7d857cc749-qcq76\" (UID: \"7c75d96a-d0a7-4f12-9799-4d01ee215248\") " pod="openstack-operators/infra-operator-controller-manager-7d857cc749-qcq76" Sep 30 12:34:23 crc kubenswrapper[5002]: I0930 12:34:23.820883 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/32ebad0d-e677-4e33-b555-44db1541becc-cert\") pod \"openstack-operator-controller-manager-84697dfb4d-w86tm\" (UID: \"32ebad0d-e677-4e33-b555-44db1541becc\") " pod="openstack-operators/openstack-operator-controller-manager-84697dfb4d-w86tm" Sep 30 12:34:23 crc kubenswrapper[5002]: I0930 12:34:23.833852 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/7c75d96a-d0a7-4f12-9799-4d01ee215248-cert\") pod \"infra-operator-controller-manager-7d857cc749-qcq76\" (UID: \"7c75d96a-d0a7-4f12-9799-4d01ee215248\") " pod="openstack-operators/infra-operator-controller-manager-7d857cc749-qcq76" Sep 30 12:34:23 crc kubenswrapper[5002]: I0930 12:34:23.838813 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ovn-operator-controller-manager-9976ff44c-kl4qt" Sep 30 12:34:23 crc kubenswrapper[5002]: I0930 12:34:23.840661 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-644bddb6d8-ph95z" event={"ID":"8eefd962-2b74-4b77-8bc9-338b8ccfd0cf","Type":"ContainerStarted","Data":"a8a036e6bf9e3133ffe32b7c18de779894103084d29b5196ffb6093ca007e5ec"} Sep 30 12:34:23 crc kubenswrapper[5002]: I0930 12:34:23.847050 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9bnsx\" (UniqueName: \"kubernetes.io/projected/0fa1c573-cbf9-43f1-8106-7cf73e93f1f3-kube-api-access-9bnsx\") pod \"test-operator-controller-manager-f66b554c6-z2dcj\" (UID: \"0fa1c573-cbf9-43f1-8106-7cf73e93f1f3\") " pod="openstack-operators/test-operator-controller-manager-f66b554c6-z2dcj" Sep 30 12:34:23 crc kubenswrapper[5002]: I0930 12:34:23.870344 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-bc7dc7bd9-d9jtf" Sep 30 12:34:23 crc kubenswrapper[5002]: I0930 12:34:23.875242 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-6ff8b75857-mf4jp"] Sep 30 12:34:23 crc kubenswrapper[5002]: I0930 12:34:23.897119 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/telemetry-operator-controller-manager-b8d54b5d7-7662l" Sep 30 12:34:23 crc kubenswrapper[5002]: I0930 12:34:23.913069 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/designate-operator-controller-manager-84f4f7b77b-rvv9g"] Sep 30 12:34:23 crc kubenswrapper[5002]: I0930 12:34:23.922402 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/32ebad0d-e677-4e33-b555-44db1541becc-cert\") pod \"openstack-operator-controller-manager-84697dfb4d-w86tm\" (UID: \"32ebad0d-e677-4e33-b555-44db1541becc\") " pod="openstack-operators/openstack-operator-controller-manager-84697dfb4d-w86tm" Sep 30 12:34:23 crc kubenswrapper[5002]: I0930 12:34:23.922442 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bhsbm\" (UniqueName: \"kubernetes.io/projected/b4044bb7-6e63-45d0-9640-7966bcd23aa9-kube-api-access-bhsbm\") pod \"rabbitmq-cluster-operator-manager-79d8469568-h9dzh\" (UID: \"b4044bb7-6e63-45d0-9640-7966bcd23aa9\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-79d8469568-h9dzh" Sep 30 12:34:23 crc kubenswrapper[5002]: E0930 12:34:23.922658 5002 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Sep 30 12:34:23 crc kubenswrapper[5002]: I0930 12:34:23.922684 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xfwjm\" (UniqueName: \"kubernetes.io/projected/e5254af8-649d-413f-b146-51c982f48073-kube-api-access-xfwjm\") pod \"watcher-operator-controller-manager-76669f99c-n44h7\" (UID: \"e5254af8-649d-413f-b146-51c982f48073\") " pod="openstack-operators/watcher-operator-controller-manager-76669f99c-n44h7" Sep 30 12:34:23 crc kubenswrapper[5002]: E0930 12:34:23.922739 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/32ebad0d-e677-4e33-b555-44db1541becc-cert podName:32ebad0d-e677-4e33-b555-44db1541becc nodeName:}" failed. No retries permitted until 2025-09-30 12:34:24.422721999 +0000 UTC m=+838.672404145 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/32ebad0d-e677-4e33-b555-44db1541becc-cert") pod "openstack-operator-controller-manager-84697dfb4d-w86tm" (UID: "32ebad0d-e677-4e33-b555-44db1541becc") : secret "webhook-server-cert" not found Sep 30 12:34:23 crc kubenswrapper[5002]: I0930 12:34:23.922751 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q28mr\" (UniqueName: \"kubernetes.io/projected/32ebad0d-e677-4e33-b555-44db1541becc-kube-api-access-q28mr\") pod \"openstack-operator-controller-manager-84697dfb4d-w86tm\" (UID: \"32ebad0d-e677-4e33-b555-44db1541becc\") " pod="openstack-operators/openstack-operator-controller-manager-84697dfb4d-w86tm" Sep 30 12:34:23 crc kubenswrapper[5002]: I0930 12:34:23.937481 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/test-operator-controller-manager-f66b554c6-z2dcj" Sep 30 12:34:23 crc kubenswrapper[5002]: I0930 12:34:23.946622 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bhsbm\" (UniqueName: \"kubernetes.io/projected/b4044bb7-6e63-45d0-9640-7966bcd23aa9-kube-api-access-bhsbm\") pod \"rabbitmq-cluster-operator-manager-79d8469568-h9dzh\" (UID: \"b4044bb7-6e63-45d0-9640-7966bcd23aa9\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-79d8469568-h9dzh" Sep 30 12:34:23 crc kubenswrapper[5002]: I0930 12:34:23.959256 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xfwjm\" (UniqueName: \"kubernetes.io/projected/e5254af8-649d-413f-b146-51c982f48073-kube-api-access-xfwjm\") pod \"watcher-operator-controller-manager-76669f99c-n44h7\" (UID: \"e5254af8-649d-413f-b146-51c982f48073\") " pod="openstack-operators/watcher-operator-controller-manager-76669f99c-n44h7" Sep 30 12:34:23 crc kubenswrapper[5002]: I0930 12:34:23.961107 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q28mr\" (UniqueName: \"kubernetes.io/projected/32ebad0d-e677-4e33-b555-44db1541becc-kube-api-access-q28mr\") pod \"openstack-operator-controller-manager-84697dfb4d-w86tm\" (UID: \"32ebad0d-e677-4e33-b555-44db1541becc\") " pod="openstack-operators/openstack-operator-controller-manager-84697dfb4d-w86tm" Sep 30 12:34:23 crc kubenswrapper[5002]: I0930 12:34:23.991941 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-76669f99c-n44h7" Sep 30 12:34:24 crc kubenswrapper[5002]: I0930 12:34:24.007600 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-7d857cc749-qcq76" Sep 30 12:34:24 crc kubenswrapper[5002]: W0930 12:34:24.007651 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod636ed6c4_281d_4ea2_be99_a04e07b08170.slice/crio-974fc3d69bf4b7d7b3051d68a6325a9e72559299a60c41dfde31c850d5f3387d WatchSource:0}: Error finding container 974fc3d69bf4b7d7b3051d68a6325a9e72559299a60c41dfde31c850d5f3387d: Status 404 returned error can't find the container with id 974fc3d69bf4b7d7b3051d68a6325a9e72559299a60c41dfde31c850d5f3387d Sep 30 12:34:24 crc kubenswrapper[5002]: I0930 12:34:24.023612 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/5d79f7da-8bfd-4f26-bcf1-b4ad36a6b42f-cert\") pod \"openstack-baremetal-operator-controller-manager-6d776955-c7j6g\" (UID: \"5d79f7da-8bfd-4f26-bcf1-b4ad36a6b42f\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-c7j6g" Sep 30 12:34:24 crc kubenswrapper[5002]: I0930 12:34:24.033172 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/5d79f7da-8bfd-4f26-bcf1-b4ad36a6b42f-cert\") pod \"openstack-baremetal-operator-controller-manager-6d776955-c7j6g\" (UID: \"5d79f7da-8bfd-4f26-bcf1-b4ad36a6b42f\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-c7j6g" Sep 30 12:34:24 crc kubenswrapper[5002]: I0930 12:34:24.088654 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-84958c4d49-chbd7"] Sep 30 12:34:24 crc kubenswrapper[5002]: I0930 12:34:24.099170 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-manager-79d8469568-h9dzh" Sep 30 12:34:24 crc kubenswrapper[5002]: I0930 12:34:24.121088 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-c7j6g" Sep 30 12:34:24 crc kubenswrapper[5002]: I0930 12:34:24.427460 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/32ebad0d-e677-4e33-b555-44db1541becc-cert\") pod \"openstack-operator-controller-manager-84697dfb4d-w86tm\" (UID: \"32ebad0d-e677-4e33-b555-44db1541becc\") " pod="openstack-operators/openstack-operator-controller-manager-84697dfb4d-w86tm" Sep 30 12:34:24 crc kubenswrapper[5002]: E0930 12:34:24.427669 5002 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Sep 30 12:34:24 crc kubenswrapper[5002]: E0930 12:34:24.427719 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/32ebad0d-e677-4e33-b555-44db1541becc-cert podName:32ebad0d-e677-4e33-b555-44db1541becc nodeName:}" failed. No retries permitted until 2025-09-30 12:34:25.427705382 +0000 UTC m=+839.677387528 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/32ebad0d-e677-4e33-b555-44db1541becc-cert") pod "openstack-operator-controller-manager-84697dfb4d-w86tm" (UID: "32ebad0d-e677-4e33-b555-44db1541becc") : secret "webhook-server-cert" not found Sep 30 12:34:24 crc kubenswrapper[5002]: I0930 12:34:24.434273 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/heat-operator-controller-manager-5d889d78cf-sjnlc"] Sep 30 12:34:24 crc kubenswrapper[5002]: I0930 12:34:24.458141 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ironic-operator-controller-manager-7975b88857-r7t4w"] Sep 30 12:34:24 crc kubenswrapper[5002]: I0930 12:34:24.495951 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-9f4696d94-42j7m"] Sep 30 12:34:24 crc kubenswrapper[5002]: I0930 12:34:24.503724 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/placement-operator-controller-manager-589c58c6c-57btn"] Sep 30 12:34:24 crc kubenswrapper[5002]: W0930 12:34:24.505090 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0d7297eb_3633_4083_9d4b_3bf8487360ca.slice/crio-bf510395618f68439c89f3667374331f202b69ec31ed23495797dfb90575be72 WatchSource:0}: Error finding container bf510395618f68439c89f3667374331f202b69ec31ed23495797dfb90575be72: Status 404 returned error can't find the container with id bf510395618f68439c89f3667374331f202b69ec31ed23495797dfb90575be72 Sep 30 12:34:24 crc kubenswrapper[5002]: I0930 12:34:24.511374 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-c7c776c96-gh66v"] Sep 30 12:34:24 crc kubenswrapper[5002]: I0930 12:34:24.516368 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-88c7-sxv7p"] Sep 30 12:34:24 crc kubenswrapper[5002]: W0930 12:34:24.516656 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2d9e02f5_4644_423a_a783_8dbc51d68570.slice/crio-426f83f3daf946ae6a9a29f8940551c6cfdf0e089029edd9ba386bdb4c5bab22 WatchSource:0}: Error finding container 426f83f3daf946ae6a9a29f8940551c6cfdf0e089029edd9ba386bdb4c5bab22: Status 404 returned error can't find the container with id 426f83f3daf946ae6a9a29f8940551c6cfdf0e089029edd9ba386bdb4c5bab22 Sep 30 12:34:24 crc kubenswrapper[5002]: I0930 12:34:24.525081 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-5bd55b4bff-m7m4h"] Sep 30 12:34:24 crc kubenswrapper[5002]: I0930 12:34:24.527261 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-controller-manager-6d68dbc695-zzt9s"] Sep 30 12:34:24 crc kubenswrapper[5002]: I0930 12:34:24.862969 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-84f4f7b77b-rvv9g" event={"ID":"9caa184d-b4ff-4419-8f8d-ede2b0b6845e","Type":"ContainerStarted","Data":"b789878e4b6a0fa2cf6823f74d4f7b05797b18d09299fa42ad67c677ae61e4e2"} Sep 30 12:34:24 crc kubenswrapper[5002]: I0930 12:34:24.864623 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-5d889d78cf-sjnlc" 
event={"ID":"63ee8874-1cbb-4183-b16a-f2efd8a1e7d6","Type":"ContainerStarted","Data":"37445df1e2c03bcfdf53eed5e7b7bec6a1b107acb0a637d712670a78d4c60226"} Sep 30 12:34:24 crc kubenswrapper[5002]: I0930 12:34:24.865896 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-5bd55b4bff-m7m4h" event={"ID":"9692c8b9-1e89-47e3-972c-1af7eb8a2ebe","Type":"ContainerStarted","Data":"243c014062047a314ea5d0725aafc1822602b70e6d646109b93132948f872bb8"} Sep 30 12:34:24 crc kubenswrapper[5002]: I0930 12:34:24.873508 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-6ff8b75857-mf4jp" event={"ID":"636ed6c4-281d-4ea2-be99-a04e07b08170","Type":"ContainerStarted","Data":"974fc3d69bf4b7d7b3051d68a6325a9e72559299a60c41dfde31c850d5f3387d"} Sep 30 12:34:24 crc kubenswrapper[5002]: I0930 12:34:24.875184 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-6d68dbc695-zzt9s" event={"ID":"855868b4-991b-4f9f-b471-5b1244221192","Type":"ContainerStarted","Data":"5fab92984262f5bf936998c2712a9f22265fb61aec30e854db4e8c2e3d3e7c4a"} Sep 30 12:34:24 crc kubenswrapper[5002]: I0930 12:34:24.877340 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-7975b88857-r7t4w" event={"ID":"720e0316-9060-4bd3-804c-f98017a3fb84","Type":"ContainerStarted","Data":"1645d3d722dba13bde5f1dc220d2f21f7222f96cdd9875da4ed70421c32a64c4"} Sep 30 12:34:24 crc kubenswrapper[5002]: I0930 12:34:24.878327 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-c7c776c96-gh66v" event={"ID":"2d9e02f5-4644-423a-a783-8dbc51d68570","Type":"ContainerStarted","Data":"426f83f3daf946ae6a9a29f8940551c6cfdf0e089029edd9ba386bdb4c5bab22"} Sep 30 12:34:24 crc kubenswrapper[5002]: I0930 12:34:24.879564 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-9f4696d94-42j7m" event={"ID":"aa7bbe9e-668f-4ee3-976d-8e9ddbdbb092","Type":"ContainerStarted","Data":"afc505a2f52980f82adf46a1fb81145cbaf635706c462835b0aaf7ba8c8fa5d9"} Sep 30 12:34:24 crc kubenswrapper[5002]: I0930 12:34:24.880908 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-84958c4d49-chbd7" event={"ID":"d44fe72c-afa7-4442-b308-0b111e16c7b8","Type":"ContainerStarted","Data":"99bcd5815d43a1273a7b6a6db8d8e07e9f705319d346b97784b0018fa68f38d2"} Sep 30 12:34:24 crc kubenswrapper[5002]: I0930 12:34:24.882016 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-88c7-sxv7p" event={"ID":"aa376f72-5b9a-4087-8ea6-a5cf80be315b","Type":"ContainerStarted","Data":"9f02a6f55c7f016c4967dfcae13b43562749fa1844645c3d5d6884f494bfe6a9"} Sep 30 12:34:24 crc kubenswrapper[5002]: I0930 12:34:24.883421 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-589c58c6c-57btn" event={"ID":"0d7297eb-3633-4083-9d4b-3bf8487360ca","Type":"ContainerStarted","Data":"bf510395618f68439c89f3667374331f202b69ec31ed23495797dfb90575be72"} Sep 30 12:34:24 crc kubenswrapper[5002]: I0930 12:34:24.919940 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ovn-operator-controller-manager-9976ff44c-kl4qt"] Sep 30 12:34:24 crc kubenswrapper[5002]: I0930 12:34:24.948565 5002 
kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-bc7dc7bd9-d9jtf"] Sep 30 12:34:24 crc kubenswrapper[5002]: I0930 12:34:24.952185 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-76669f99c-n44h7"] Sep 30 12:34:24 crc kubenswrapper[5002]: I0930 12:34:24.968307 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/neutron-operator-controller-manager-64d7b59854-hcr66"] Sep 30 12:34:24 crc kubenswrapper[5002]: I0930 12:34:24.976809 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-b8d54b5d7-7662l"] Sep 30 12:34:24 crc kubenswrapper[5002]: E0930 12:34:24.977694 5002 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/octavia-operator@sha256:4d08afd31dc5ded10c54a5541f514ac351e9b40a183285b3db27d0757a6354c8,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-gsn76,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod octavia-operator-controller-manager-76fcc6dc7c-xwlmz_openstack-operators(38eab40a-26bf-4c1b-8911-4d6672629e3e): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Sep 30 12:34:24 crc kubenswrapper[5002]: I0930 12:34:24.988654 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/octavia-operator-controller-manager-76fcc6dc7c-xwlmz"] Sep 30 12:34:24 crc kubenswrapper[5002]: W0930 12:34:24.990644 5002 manager.go:1169] Failed to process watch event {EventType:0 
Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6413b1d0_7f0f_4bca_88e9_90a9d78bff9c.slice/crio-b01312f8b6a95678d6fe72f4c29a8862029fdcafdc9d656d790c8cfb31760558 WatchSource:0}: Error finding container b01312f8b6a95678d6fe72f4c29a8862029fdcafdc9d656d790c8cfb31760558: Status 404 returned error can't find the container with id b01312f8b6a95678d6fe72f4c29a8862029fdcafdc9d656d790c8cfb31760558 Sep 30 12:34:24 crc kubenswrapper[5002]: W0930 12:34:24.993460 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0fa1c573_cbf9_43f1_8106_7cf73e93f1f3.slice/crio-ead398231ea215259e1558e936fa25529ac446524d6d5dfe47dad93a304637d2 WatchSource:0}: Error finding container ead398231ea215259e1558e936fa25529ac446524d6d5dfe47dad93a304637d2: Status 404 returned error can't find the container with id ead398231ea215259e1558e936fa25529ac446524d6d5dfe47dad93a304637d2 Sep 30 12:34:24 crc kubenswrapper[5002]: W0930 12:34:24.994025 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7c75d96a_d0a7_4f12_9799_4d01ee215248.slice/crio-d53767b3b4557796af4cf01a281b9dcea467cbd40760b272411ab21f6f9bec8c WatchSource:0}: Error finding container d53767b3b4557796af4cf01a281b9dcea467cbd40760b272411ab21f6f9bec8c: Status 404 returned error can't find the container with id d53767b3b4557796af4cf01a281b9dcea467cbd40760b272411ab21f6f9bec8c Sep 30 12:34:24 crc kubenswrapper[5002]: W0930 12:34:24.994730 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod78f98b71_27e2_411c_b610_8b4be1068d5a.slice/crio-2c5a5863c84dd5fe5aedc8749ae6dd7d37a0848603515ed4fa3da3395814a75e WatchSource:0}: Error finding container 2c5a5863c84dd5fe5aedc8749ae6dd7d37a0848603515ed4fa3da3395814a75e: Status 404 returned error can't find the container with id 2c5a5863c84dd5fe5aedc8749ae6dd7d37a0848603515ed4fa3da3395814a75e Sep 30 12:34:24 crc kubenswrapper[5002]: E0930 12:34:24.997748 5002 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/neutron-operator@sha256:485df5c7813cdf4cf21f48ec48c8e3e4962fee6a1ae4c64f7af127d5ab346a10,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-wh6bb,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod neutron-operator-controller-manager-64d7b59854-hcr66_openstack-operators(6413b1d0-7f0f-4bca-88e9-90a9d78bff9c): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Sep 30 12:34:25 crc kubenswrapper[5002]: I0930 12:34:25.001192 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-f66b554c6-z2dcj"] Sep 30 12:34:25 crc kubenswrapper[5002]: I0930 12:34:25.005057 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-7d857cc749-qcq76"] Sep 30 12:34:25 crc kubenswrapper[5002]: E0930 12:34:25.007100 5002 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/test-operator@sha256:a303e460aec09217f90043b8ff19c01061af003b614833b33a593df9c00ddf80,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-9bnsx,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod test-operator-controller-manager-f66b554c6-z2dcj_openstack-operators(0fa1c573-cbf9-43f1-8106-7cf73e93f1f3): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Sep 30 12:34:25 crc kubenswrapper[5002]: E0930 12:34:25.007121 5002 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/telemetry-operator@sha256:8fdf377daf05e2fa7346505017078fa81981dd945bf635a64c8022633c68118f,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-s2zj8,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod telemetry-operator-controller-manager-b8d54b5d7-7662l_openstack-operators(78f98b71-27e2-411c-b610-8b4be1068d5a): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Sep 30 12:34:25 crc kubenswrapper[5002]: E0930 12:34:25.007121 5002 
kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/infra-operator@sha256:de99ad053f95f132f62b38335b2e8bf22fc28acbd441c3814764d63b63ef755f,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:true,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{600 -3} {} 600m DecimalSI},memory: {{2147483648 0} {} 2Gi BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{536870912 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:cert,ReadOnly:true,MountPath:/tmp/k8s-webhook-server/serving-certs,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-6d6qb,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod infra-operator-controller-manager-7d857cc749-qcq76_openstack-operators(7c75d96a-d0a7-4f12-9799-4d01ee215248): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Sep 30 12:34:25 crc kubenswrapper[5002]: I0930 12:34:25.013575 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-c7j6g"] Sep 30 12:34:25 crc kubenswrapper[5002]: I0930 12:34:25.017596 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-79d8469568-h9dzh"] Sep 30 12:34:25 crc kubenswrapper[5002]: W0930 12:34:25.026371 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5d79f7da_8bfd_4f26_bcf1_b4ad36a6b42f.slice/crio-ca0cfeb36d7fe933662155ef8e1580a36dd53f40f41d3190422c547f1a35ed0d WatchSource:0}: Error finding container ca0cfeb36d7fe933662155ef8e1580a36dd53f40f41d3190422c547f1a35ed0d: Status 404 returned error can't find the container with id ca0cfeb36d7fe933662155ef8e1580a36dd53f40f41d3190422c547f1a35ed0d Sep 30 12:34:25 crc 
kubenswrapper[5002]: E0930 12:34:25.055662 5002 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:operator,Image:quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:225524223bf2a7f3a4ce95958fc9ca6fdab02745fb70374e8ff5bf1ddaceda4b,Command:[/manager],Args:[],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:metrics,HostPort:0,ContainerPort:9782,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:OPERATOR_NAMESPACE,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:metadata.namespace,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{200 -3} {} 200m DecimalSI},memory: {{524288000 0} {} 500Mi BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-bhsbm,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod rabbitmq-cluster-operator-manager-79d8469568-h9dzh_openstack-operators(b4044bb7-6e63-45d0-9640-7966bcd23aa9): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Sep 30 12:34:25 crc kubenswrapper[5002]: E0930 12:34:25.056810 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-79d8469568-h9dzh" podUID="b4044bb7-6e63-45d0-9640-7966bcd23aa9" Sep 30 12:34:25 crc kubenswrapper[5002]: E0930 12:34:25.165282 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/octavia-operator-controller-manager-76fcc6dc7c-xwlmz" podUID="38eab40a-26bf-4c1b-8911-4d6672629e3e" Sep 30 12:34:25 crc kubenswrapper[5002]: E0930 12:34:25.203878 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/infra-operator-controller-manager-7d857cc749-qcq76" podUID="7c75d96a-d0a7-4f12-9799-4d01ee215248" Sep 30 12:34:25 crc kubenswrapper[5002]: E0930 12:34:25.261988 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/telemetry-operator-controller-manager-b8d54b5d7-7662l" podUID="78f98b71-27e2-411c-b610-8b4be1068d5a" Sep 30 12:34:25 crc kubenswrapper[5002]: E0930 12:34:25.271610 5002 pod_workers.go:1301] "Error syncing pod, 
skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/neutron-operator-controller-manager-64d7b59854-hcr66" podUID="6413b1d0-7f0f-4bca-88e9-90a9d78bff9c" Sep 30 12:34:25 crc kubenswrapper[5002]: E0930 12:34:25.280775 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/test-operator-controller-manager-f66b554c6-z2dcj" podUID="0fa1c573-cbf9-43f1-8106-7cf73e93f1f3" Sep 30 12:34:25 crc kubenswrapper[5002]: I0930 12:34:25.440208 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/32ebad0d-e677-4e33-b555-44db1541becc-cert\") pod \"openstack-operator-controller-manager-84697dfb4d-w86tm\" (UID: \"32ebad0d-e677-4e33-b555-44db1541becc\") " pod="openstack-operators/openstack-operator-controller-manager-84697dfb4d-w86tm" Sep 30 12:34:25 crc kubenswrapper[5002]: I0930 12:34:25.445713 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/32ebad0d-e677-4e33-b555-44db1541becc-cert\") pod \"openstack-operator-controller-manager-84697dfb4d-w86tm\" (UID: \"32ebad0d-e677-4e33-b555-44db1541becc\") " pod="openstack-operators/openstack-operator-controller-manager-84697dfb4d-w86tm" Sep 30 12:34:25 crc kubenswrapper[5002]: I0930 12:34:25.575722 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-controller-manager-84697dfb4d-w86tm" Sep 30 12:34:25 crc kubenswrapper[5002]: I0930 12:34:25.892611 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-76fcc6dc7c-xwlmz" event={"ID":"38eab40a-26bf-4c1b-8911-4d6672629e3e","Type":"ContainerStarted","Data":"6aeee8f6f0cc06a36d83a0c903c08c725475094dd49ccab6abc432cf453d4efe"} Sep 30 12:34:25 crc kubenswrapper[5002]: I0930 12:34:25.892911 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-76fcc6dc7c-xwlmz" event={"ID":"38eab40a-26bf-4c1b-8911-4d6672629e3e","Type":"ContainerStarted","Data":"ef4256cc79527d09c57854d4f16899f06b3c776c7655569fbd66bfa546ab30fa"} Sep 30 12:34:25 crc kubenswrapper[5002]: E0930 12:34:25.894570 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/octavia-operator@sha256:4d08afd31dc5ded10c54a5541f514ac351e9b40a183285b3db27d0757a6354c8\\\"\"" pod="openstack-operators/octavia-operator-controller-manager-76fcc6dc7c-xwlmz" podUID="38eab40a-26bf-4c1b-8911-4d6672629e3e" Sep 30 12:34:25 crc kubenswrapper[5002]: I0930 12:34:25.895798 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-9976ff44c-kl4qt" event={"ID":"2607efd2-d90d-4a1e-be6c-5f3c88da67e4","Type":"ContainerStarted","Data":"875a8a857c7bb5cf79132d7f420caacee118c6615884f6cfd059b65aaa182b8e"} Sep 30 12:34:25 crc kubenswrapper[5002]: I0930 12:34:25.900456 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-76669f99c-n44h7" event={"ID":"e5254af8-649d-413f-b146-51c982f48073","Type":"ContainerStarted","Data":"545b1c3d48d11c23ca4a57a4c78596f126a878ada7472544a8bd00c0d50d9aa0"} Sep 30 12:34:25 crc kubenswrapper[5002]: I0930 12:34:25.902452 
5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-b8d54b5d7-7662l" event={"ID":"78f98b71-27e2-411c-b610-8b4be1068d5a","Type":"ContainerStarted","Data":"6620cce31b1a5200f434eb22383e5a775e4d2a5d9f15a8172d6939feebd12e22"} Sep 30 12:34:25 crc kubenswrapper[5002]: I0930 12:34:25.902517 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-b8d54b5d7-7662l" event={"ID":"78f98b71-27e2-411c-b610-8b4be1068d5a","Type":"ContainerStarted","Data":"2c5a5863c84dd5fe5aedc8749ae6dd7d37a0848603515ed4fa3da3395814a75e"} Sep 30 12:34:25 crc kubenswrapper[5002]: E0930 12:34:25.903640 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/telemetry-operator@sha256:8fdf377daf05e2fa7346505017078fa81981dd945bf635a64c8022633c68118f\\\"\"" pod="openstack-operators/telemetry-operator-controller-manager-b8d54b5d7-7662l" podUID="78f98b71-27e2-411c-b610-8b4be1068d5a" Sep 30 12:34:25 crc kubenswrapper[5002]: I0930 12:34:25.904493 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-79d8469568-h9dzh" event={"ID":"b4044bb7-6e63-45d0-9640-7966bcd23aa9","Type":"ContainerStarted","Data":"3b6b6819c5da1f0727f325b2d4386027df918a55acb21a3495fc509f2a7508c8"} Sep 30 12:34:25 crc kubenswrapper[5002]: I0930 12:34:25.906262 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-64d7b59854-hcr66" event={"ID":"6413b1d0-7f0f-4bca-88e9-90a9d78bff9c","Type":"ContainerStarted","Data":"fb77474970a7321d2f2a1342214deee19ed5b67d0bf69e7dd93c481f6a870eb4"} Sep 30 12:34:25 crc kubenswrapper[5002]: I0930 12:34:25.906293 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-64d7b59854-hcr66" event={"ID":"6413b1d0-7f0f-4bca-88e9-90a9d78bff9c","Type":"ContainerStarted","Data":"b01312f8b6a95678d6fe72f4c29a8862029fdcafdc9d656d790c8cfb31760558"} Sep 30 12:34:25 crc kubenswrapper[5002]: E0930 12:34:25.907622 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/neutron-operator@sha256:485df5c7813cdf4cf21f48ec48c8e3e4962fee6a1ae4c64f7af127d5ab346a10\\\"\"" pod="openstack-operators/neutron-operator-controller-manager-64d7b59854-hcr66" podUID="6413b1d0-7f0f-4bca-88e9-90a9d78bff9c" Sep 30 12:34:25 crc kubenswrapper[5002]: E0930 12:34:25.908025 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:225524223bf2a7f3a4ce95958fc9ca6fdab02745fb70374e8ff5bf1ddaceda4b\\\"\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-79d8469568-h9dzh" podUID="b4044bb7-6e63-45d0-9640-7966bcd23aa9" Sep 30 12:34:25 crc kubenswrapper[5002]: I0930 12:34:25.919185 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-f66b554c6-z2dcj" event={"ID":"0fa1c573-cbf9-43f1-8106-7cf73e93f1f3","Type":"ContainerStarted","Data":"01346b228e1b56c517badb6142febe852e1e3621873e771992d9f30a398a694e"} Sep 30 12:34:25 crc kubenswrapper[5002]: I0930 12:34:25.919229 5002 kubelet.go:2453] "SyncLoop 
(PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-f66b554c6-z2dcj" event={"ID":"0fa1c573-cbf9-43f1-8106-7cf73e93f1f3","Type":"ContainerStarted","Data":"ead398231ea215259e1558e936fa25529ac446524d6d5dfe47dad93a304637d2"} Sep 30 12:34:25 crc kubenswrapper[5002]: E0930 12:34:25.922017 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/test-operator@sha256:a303e460aec09217f90043b8ff19c01061af003b614833b33a593df9c00ddf80\\\"\"" pod="openstack-operators/test-operator-controller-manager-f66b554c6-z2dcj" podUID="0fa1c573-cbf9-43f1-8106-7cf73e93f1f3" Sep 30 12:34:25 crc kubenswrapper[5002]: I0930 12:34:25.923176 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-bc7dc7bd9-d9jtf" event={"ID":"90cc681e-c24d-4b64-862d-3514308e77c6","Type":"ContainerStarted","Data":"f9ace9bbee8ae2dd337f5db51aa139b9624bd7e007379bff6cf1b4a5c6fdb049"} Sep 30 12:34:25 crc kubenswrapper[5002]: I0930 12:34:25.925509 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-c7j6g" event={"ID":"5d79f7da-8bfd-4f26-bcf1-b4ad36a6b42f","Type":"ContainerStarted","Data":"ca0cfeb36d7fe933662155ef8e1580a36dd53f40f41d3190422c547f1a35ed0d"} Sep 30 12:34:25 crc kubenswrapper[5002]: I0930 12:34:25.927872 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-7d857cc749-qcq76" event={"ID":"7c75d96a-d0a7-4f12-9799-4d01ee215248","Type":"ContainerStarted","Data":"80237db35f6de644abc0cb06fcc93b9f533e4c5f9fa3fe9369e792ce5e2704ad"} Sep 30 12:34:25 crc kubenswrapper[5002]: I0930 12:34:25.927900 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-7d857cc749-qcq76" event={"ID":"7c75d96a-d0a7-4f12-9799-4d01ee215248","Type":"ContainerStarted","Data":"d53767b3b4557796af4cf01a281b9dcea467cbd40760b272411ab21f6f9bec8c"} Sep 30 12:34:25 crc kubenswrapper[5002]: E0930 12:34:25.932199 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/infra-operator@sha256:de99ad053f95f132f62b38335b2e8bf22fc28acbd441c3814764d63b63ef755f\\\"\"" pod="openstack-operators/infra-operator-controller-manager-7d857cc749-qcq76" podUID="7c75d96a-d0a7-4f12-9799-4d01ee215248" Sep 30 12:34:26 crc kubenswrapper[5002]: E0930 12:34:26.936155 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/infra-operator@sha256:de99ad053f95f132f62b38335b2e8bf22fc28acbd441c3814764d63b63ef755f\\\"\"" pod="openstack-operators/infra-operator-controller-manager-7d857cc749-qcq76" podUID="7c75d96a-d0a7-4f12-9799-4d01ee215248" Sep 30 12:34:26 crc kubenswrapper[5002]: E0930 12:34:26.937208 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/telemetry-operator@sha256:8fdf377daf05e2fa7346505017078fa81981dd945bf635a64c8022633c68118f\\\"\"" pod="openstack-operators/telemetry-operator-controller-manager-b8d54b5d7-7662l" podUID="78f98b71-27e2-411c-b610-8b4be1068d5a" Sep 30 12:34:26 crc 
kubenswrapper[5002]: E0930 12:34:26.937320 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/octavia-operator@sha256:4d08afd31dc5ded10c54a5541f514ac351e9b40a183285b3db27d0757a6354c8\\\"\"" pod="openstack-operators/octavia-operator-controller-manager-76fcc6dc7c-xwlmz" podUID="38eab40a-26bf-4c1b-8911-4d6672629e3e" Sep 30 12:34:26 crc kubenswrapper[5002]: E0930 12:34:26.937420 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/neutron-operator@sha256:485df5c7813cdf4cf21f48ec48c8e3e4962fee6a1ae4c64f7af127d5ab346a10\\\"\"" pod="openstack-operators/neutron-operator-controller-manager-64d7b59854-hcr66" podUID="6413b1d0-7f0f-4bca-88e9-90a9d78bff9c" Sep 30 12:34:26 crc kubenswrapper[5002]: E0930 12:34:26.937512 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/test-operator@sha256:a303e460aec09217f90043b8ff19c01061af003b614833b33a593df9c00ddf80\\\"\"" pod="openstack-operators/test-operator-controller-manager-f66b554c6-z2dcj" podUID="0fa1c573-cbf9-43f1-8106-7cf73e93f1f3" Sep 30 12:34:26 crc kubenswrapper[5002]: E0930 12:34:26.937576 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:225524223bf2a7f3a4ce95958fc9ca6fdab02745fb70374e8ff5bf1ddaceda4b\\\"\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-79d8469568-h9dzh" podUID="b4044bb7-6e63-45d0-9640-7966bcd23aa9" Sep 30 12:34:33 crc kubenswrapper[5002]: I0930 12:34:33.939005 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-manager-84697dfb4d-w86tm"] Sep 30 12:34:33 crc kubenswrapper[5002]: I0930 12:34:33.979315 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-84958c4d49-chbd7" event={"ID":"d44fe72c-afa7-4442-b308-0b111e16c7b8","Type":"ContainerStarted","Data":"94e9ddbef517a713aa0d300a0b88cd37422c220523691f9b4acfaf262c1cc077"} Sep 30 12:34:33 crc kubenswrapper[5002]: I0930 12:34:33.987144 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-84f4f7b77b-rvv9g" event={"ID":"9caa184d-b4ff-4419-8f8d-ede2b0b6845e","Type":"ContainerStarted","Data":"b13f618c6d61d93ec67ee79e123dc664854d234e5dec5ab5e8dde988a6f3d45a"} Sep 30 12:34:33 crc kubenswrapper[5002]: I0930 12:34:33.999331 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-76669f99c-n44h7" event={"ID":"e5254af8-649d-413f-b146-51c982f48073","Type":"ContainerStarted","Data":"468628b513a7db034cdae65312185baa78886d7f925f55fb7191b990ba2357d2"} Sep 30 12:34:34 crc kubenswrapper[5002]: I0930 12:34:34.002152 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-6ff8b75857-mf4jp" event={"ID":"636ed6c4-281d-4ea2-be99-a04e07b08170","Type":"ContainerStarted","Data":"1f76b4e582fad2fdfcb76352e663788998243c279c73eea3f7b0fb4922f3e773"} Sep 30 12:34:34 crc kubenswrapper[5002]: I0930 12:34:34.010358 5002 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-644bddb6d8-ph95z" event={"ID":"8eefd962-2b74-4b77-8bc9-338b8ccfd0cf","Type":"ContainerStarted","Data":"c9311aa780c244a35495edf4628bf6e3b5dfb4a3180ad2afabcb8818db091b2d"} Sep 30 12:34:34 crc kubenswrapper[5002]: I0930 12:34:34.015874 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-c7c776c96-gh66v" event={"ID":"2d9e02f5-4644-423a-a783-8dbc51d68570","Type":"ContainerStarted","Data":"2d3eb53a9882d363d8f3c775a584ae2a8e589e2553cb1f68fbef928385d84dda"} Sep 30 12:34:34 crc kubenswrapper[5002]: I0930 12:34:34.022523 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-9f4696d94-42j7m" event={"ID":"aa7bbe9e-668f-4ee3-976d-8e9ddbdbb092","Type":"ContainerStarted","Data":"1d529f6a3f64f77b0028fb18ece7415d77b462e425bb0f3927fcb8d2dadb8ee0"} Sep 30 12:34:34 crc kubenswrapper[5002]: I0930 12:34:34.028461 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-589c58c6c-57btn" event={"ID":"0d7297eb-3633-4083-9d4b-3bf8487360ca","Type":"ContainerStarted","Data":"da5ee34eb1eea2d7994fbd37b79a27032ef02b1671e9d35cca5b699bc57571dd"} Sep 30 12:34:35 crc kubenswrapper[5002]: I0930 12:34:35.039458 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-644bddb6d8-ph95z" event={"ID":"8eefd962-2b74-4b77-8bc9-338b8ccfd0cf","Type":"ContainerStarted","Data":"b6394efd5b8ac1906d80bd29619d6cddc5517fac056c98c224450ab2d612c76f"} Sep 30 12:34:35 crc kubenswrapper[5002]: I0930 12:34:35.039843 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/cinder-operator-controller-manager-644bddb6d8-ph95z" Sep 30 12:34:35 crc kubenswrapper[5002]: I0930 12:34:35.041352 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-5bd55b4bff-m7m4h" event={"ID":"9692c8b9-1e89-47e3-972c-1af7eb8a2ebe","Type":"ContainerStarted","Data":"8dcdc7789d173e177a681a8a53e5ac7ed09addafb764f270150141d0bdd24a46"} Sep 30 12:34:35 crc kubenswrapper[5002]: I0930 12:34:35.043723 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-7975b88857-r7t4w" event={"ID":"720e0316-9060-4bd3-804c-f98017a3fb84","Type":"ContainerStarted","Data":"9cf88e39929055b04a2bd707b9037f49af2ad3bd63cfa1561d5899efb51be7eb"} Sep 30 12:34:35 crc kubenswrapper[5002]: I0930 12:34:35.045910 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-5d889d78cf-sjnlc" event={"ID":"63ee8874-1cbb-4183-b16a-f2efd8a1e7d6","Type":"ContainerStarted","Data":"346fa458b0db43c0b8995250c208599aef9efc58eb8833bc96fe8b44a223de81"} Sep 30 12:34:35 crc kubenswrapper[5002]: I0930 12:34:35.048108 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-6ff8b75857-mf4jp" event={"ID":"636ed6c4-281d-4ea2-be99-a04e07b08170","Type":"ContainerStarted","Data":"f2bfc3686147c70a9e64272322ef5eaf862ea846929a7bd8a049670d20d87126"} Sep 30 12:34:35 crc kubenswrapper[5002]: I0930 12:34:35.048483 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/barbican-operator-controller-manager-6ff8b75857-mf4jp" Sep 30 12:34:35 crc kubenswrapper[5002]: I0930 12:34:35.050052 5002 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-6d68dbc695-zzt9s" event={"ID":"855868b4-991b-4f9f-b471-5b1244221192","Type":"ContainerStarted","Data":"1a660d4babb426fc07058da85f43f52010094c090bf703a73d782c94720d07ff"} Sep 30 12:34:35 crc kubenswrapper[5002]: I0930 12:34:35.052331 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-88c7-sxv7p" event={"ID":"aa376f72-5b9a-4087-8ea6-a5cf80be315b","Type":"ContainerStarted","Data":"5c498e9683c54f8419ad3975124a7e54261a1478de410950208e0d0dca6625cc"} Sep 30 12:34:35 crc kubenswrapper[5002]: I0930 12:34:35.052358 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-88c7-sxv7p" event={"ID":"aa376f72-5b9a-4087-8ea6-a5cf80be315b","Type":"ContainerStarted","Data":"f2e5da4c2c5391ca5c10db2c922ed5a76e4db96fff83ed69cda78e095f1ba876"} Sep 30 12:34:35 crc kubenswrapper[5002]: I0930 12:34:35.052454 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/mariadb-operator-controller-manager-88c7-sxv7p" Sep 30 12:34:35 crc kubenswrapper[5002]: I0930 12:34:35.053834 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-84697dfb4d-w86tm" event={"ID":"32ebad0d-e677-4e33-b555-44db1541becc","Type":"ContainerStarted","Data":"1c0edac501ea446f30c11ea098e46396a7c8e0a412f239a5a8c7aef9467f36ad"} Sep 30 12:34:35 crc kubenswrapper[5002]: I0930 12:34:35.057761 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-76669f99c-n44h7" event={"ID":"e5254af8-649d-413f-b146-51c982f48073","Type":"ContainerStarted","Data":"c2d7afd9f5c6ccd28565d3db4eb7820e06ed41ca054f3874ea590103a48e269d"} Sep 30 12:34:35 crc kubenswrapper[5002]: I0930 12:34:35.058308 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/watcher-operator-controller-manager-76669f99c-n44h7" Sep 30 12:34:35 crc kubenswrapper[5002]: I0930 12:34:35.060875 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/cinder-operator-controller-manager-644bddb6d8-ph95z" podStartSLOduration=4.106507189 podStartE2EDuration="13.060856712s" podCreationTimestamp="2025-09-30 12:34:22 +0000 UTC" firstStartedPulling="2025-09-30 12:34:23.812099931 +0000 UTC m=+838.061782077" lastFinishedPulling="2025-09-30 12:34:32.766449454 +0000 UTC m=+847.016131600" observedRunningTime="2025-09-30 12:34:35.054829517 +0000 UTC m=+849.304511683" watchObservedRunningTime="2025-09-30 12:34:35.060856712 +0000 UTC m=+849.310538868" Sep 30 12:34:35 crc kubenswrapper[5002]: I0930 12:34:35.072200 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-9f4696d94-42j7m" event={"ID":"aa7bbe9e-668f-4ee3-976d-8e9ddbdbb092","Type":"ContainerStarted","Data":"669dc1c1134588557ea136cb49b5dca5cb51dbfdc707ab29f456239217068af8"} Sep 30 12:34:35 crc kubenswrapper[5002]: I0930 12:34:35.072350 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/horizon-operator-controller-manager-9f4696d94-42j7m" Sep 30 12:34:35 crc kubenswrapper[5002]: I0930 12:34:35.074073 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-9976ff44c-kl4qt" 
event={"ID":"2607efd2-d90d-4a1e-be6c-5f3c88da67e4","Type":"ContainerStarted","Data":"8f635a088453b40ced4395cfac4c1831f19beb090d29a100b83f9d27d43ef739"} Sep 30 12:34:35 crc kubenswrapper[5002]: I0930 12:34:35.076065 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/barbican-operator-controller-manager-6ff8b75857-mf4jp" podStartSLOduration=3.795536278 podStartE2EDuration="13.076055358s" podCreationTimestamp="2025-09-30 12:34:22 +0000 UTC" firstStartedPulling="2025-09-30 12:34:24.020571192 +0000 UTC m=+838.270253328" lastFinishedPulling="2025-09-30 12:34:33.301090262 +0000 UTC m=+847.550772408" observedRunningTime="2025-09-30 12:34:35.075601776 +0000 UTC m=+849.325283942" watchObservedRunningTime="2025-09-30 12:34:35.076055358 +0000 UTC m=+849.325737514" Sep 30 12:34:35 crc kubenswrapper[5002]: I0930 12:34:35.076364 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-84958c4d49-chbd7" event={"ID":"d44fe72c-afa7-4442-b308-0b111e16c7b8","Type":"ContainerStarted","Data":"efe92903e2659ca1e80c48b188635a3e0d71ac2075b33c77f846e5f12e646d3f"} Sep 30 12:34:35 crc kubenswrapper[5002]: I0930 12:34:35.077551 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/glance-operator-controller-manager-84958c4d49-chbd7" Sep 30 12:34:35 crc kubenswrapper[5002]: I0930 12:34:35.080595 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-589c58c6c-57btn" event={"ID":"0d7297eb-3633-4083-9d4b-3bf8487360ca","Type":"ContainerStarted","Data":"1980bef89839b362e0f5e25465e4ade127fd9fbbfe65aa40672cf437787f160a"} Sep 30 12:34:35 crc kubenswrapper[5002]: I0930 12:34:35.080728 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/placement-operator-controller-manager-589c58c6c-57btn" Sep 30 12:34:35 crc kubenswrapper[5002]: I0930 12:34:35.092972 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-c7j6g" event={"ID":"5d79f7da-8bfd-4f26-bcf1-b4ad36a6b42f","Type":"ContainerStarted","Data":"62524a89188f94db0e87b0fd3384383d537eb44e32311c1c908c811c9cdac298"} Sep 30 12:34:35 crc kubenswrapper[5002]: I0930 12:34:35.100539 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-bc7dc7bd9-d9jtf" event={"ID":"90cc681e-c24d-4b64-862d-3514308e77c6","Type":"ContainerStarted","Data":"023326567a71f140960fefc315e2864b89a713f976e8dbf7df77afbd20f83222"} Sep 30 12:34:35 crc kubenswrapper[5002]: I0930 12:34:35.101305 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/swift-operator-controller-manager-bc7dc7bd9-d9jtf" Sep 30 12:34:35 crc kubenswrapper[5002]: I0930 12:34:35.103951 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/mariadb-operator-controller-manager-88c7-sxv7p" podStartSLOduration=3.218311083 podStartE2EDuration="12.103446128s" podCreationTimestamp="2025-09-30 12:34:23 +0000 UTC" firstStartedPulling="2025-09-30 12:34:24.534589275 +0000 UTC m=+838.784271421" lastFinishedPulling="2025-09-30 12:34:33.41972432 +0000 UTC m=+847.669406466" observedRunningTime="2025-09-30 12:34:35.09805284 +0000 UTC m=+849.347734996" watchObservedRunningTime="2025-09-30 12:34:35.103446128 +0000 UTC m=+849.353128264" Sep 30 12:34:35 crc kubenswrapper[5002]: I0930 12:34:35.133524 
5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/placement-operator-controller-manager-589c58c6c-57btn" podStartSLOduration=3.22399873 podStartE2EDuration="12.133503421s" podCreationTimestamp="2025-09-30 12:34:23 +0000 UTC" firstStartedPulling="2025-09-30 12:34:24.512606414 +0000 UTC m=+838.762288560" lastFinishedPulling="2025-09-30 12:34:33.422111105 +0000 UTC m=+847.671793251" observedRunningTime="2025-09-30 12:34:35.133417358 +0000 UTC m=+849.383099514" watchObservedRunningTime="2025-09-30 12:34:35.133503421 +0000 UTC m=+849.383185577" Sep 30 12:34:35 crc kubenswrapper[5002]: I0930 12:34:35.158949 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/horizon-operator-controller-manager-9f4696d94-42j7m" podStartSLOduration=4.3018877589999995 podStartE2EDuration="13.158928927s" podCreationTimestamp="2025-09-30 12:34:22 +0000 UTC" firstStartedPulling="2025-09-30 12:34:24.505224628 +0000 UTC m=+838.754906774" lastFinishedPulling="2025-09-30 12:34:33.362265796 +0000 UTC m=+847.611947942" observedRunningTime="2025-09-30 12:34:35.156365577 +0000 UTC m=+849.406047723" watchObservedRunningTime="2025-09-30 12:34:35.158928927 +0000 UTC m=+849.408611073" Sep 30 12:34:35 crc kubenswrapper[5002]: I0930 12:34:35.202328 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/watcher-operator-controller-manager-76669f99c-n44h7" podStartSLOduration=3.7492965160000002 podStartE2EDuration="12.202311334s" podCreationTimestamp="2025-09-30 12:34:23 +0000 UTC" firstStartedPulling="2025-09-30 12:34:24.968988634 +0000 UTC m=+839.218670780" lastFinishedPulling="2025-09-30 12:34:33.422003412 +0000 UTC m=+847.671685598" observedRunningTime="2025-09-30 12:34:35.181516356 +0000 UTC m=+849.431198522" watchObservedRunningTime="2025-09-30 12:34:35.202311334 +0000 UTC m=+849.451993480" Sep 30 12:34:35 crc kubenswrapper[5002]: I0930 12:34:35.202891 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/glance-operator-controller-manager-84958c4d49-chbd7" podStartSLOduration=3.997897472 podStartE2EDuration="13.202886541s" podCreationTimestamp="2025-09-30 12:34:22 +0000 UTC" firstStartedPulling="2025-09-30 12:34:24.096193746 +0000 UTC m=+838.345875882" lastFinishedPulling="2025-09-30 12:34:33.301182805 +0000 UTC m=+847.550864951" observedRunningTime="2025-09-30 12:34:35.20140854 +0000 UTC m=+849.451090686" watchObservedRunningTime="2025-09-30 12:34:35.202886541 +0000 UTC m=+849.452568687" Sep 30 12:34:35 crc kubenswrapper[5002]: I0930 12:34:35.220443 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/swift-operator-controller-manager-bc7dc7bd9-d9jtf" podStartSLOduration=3.762628558 podStartE2EDuration="12.22042642s" podCreationTimestamp="2025-09-30 12:34:23 +0000 UTC" firstStartedPulling="2025-09-30 12:34:24.964139309 +0000 UTC m=+839.213821455" lastFinishedPulling="2025-09-30 12:34:33.421937181 +0000 UTC m=+847.671619317" observedRunningTime="2025-09-30 12:34:35.219340161 +0000 UTC m=+849.469022307" watchObservedRunningTime="2025-09-30 12:34:35.22042642 +0000 UTC m=+849.470108556" Sep 30 12:34:36 crc kubenswrapper[5002]: I0930 12:34:36.108959 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-84f4f7b77b-rvv9g" 
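Editor's note: the pod_startup_latency_tracker.go entries above log two figures per pod: podStartE2EDuration, the wall-clock gap from podCreationTimestamp to the observed running time, and podStartSLOduration, which excludes the image-pull window (lastFinishedPulling minus firstStartedPulling). Where no pull is recorded, the zero-valued "0001-01-01" timestamps make the two equal. Below is a minimal Go sketch re-deriving both from the barbican-operator entry; the parse helper and layout string are illustrative, not kubelet code.

package main

import (
	"fmt"
	"time"
)

func main() {
	// Timestamps copied from the barbican-operator-controller-manager entry above.
	const layout = "2006-01-02 15:04:05.999999999 -0700 MST"
	parse := func(s string) time.Time {
		t, err := time.Parse(layout, s)
		if err != nil {
			panic(err)
		}
		return t
	}
	created := parse("2025-09-30 12:34:22 +0000 UTC")             // podCreationTimestamp
	firstPull := parse("2025-09-30 12:34:24.020571192 +0000 UTC") // firstStartedPulling
	lastPull := parse("2025-09-30 12:34:33.301090262 +0000 UTC")  // lastFinishedPulling
	running := parse("2025-09-30 12:34:35.076055358 +0000 UTC")   // watchObservedRunningTime

	e2e := running.Sub(created)          // podStartE2EDuration: 13.076055358s
	slo := e2e - lastPull.Sub(firstPull) // podStartSLOduration: ~3.7955s; the logged value
	// differs in the last digits because the kubelet subtracts monotonic clock
	// readings (the m=+ offsets), not the wall-clock strings reprinted here.
	fmt.Println(e2e, slo)
}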
event={"ID":"9caa184d-b4ff-4419-8f8d-ede2b0b6845e","Type":"ContainerStarted","Data":"b988d64a9c192f8e357ed14eb1f601d9a745e2f7cbe3f3abe637a8be0fbd49eb"} Sep 30 12:34:36 crc kubenswrapper[5002]: I0930 12:34:36.109057 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/designate-operator-controller-manager-84f4f7b77b-rvv9g" Sep 30 12:34:36 crc kubenswrapper[5002]: I0930 12:34:36.111225 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-5bd55b4bff-m7m4h" event={"ID":"9692c8b9-1e89-47e3-972c-1af7eb8a2ebe","Type":"ContainerStarted","Data":"29c7c5049542f590aa913c60722a4aa90c8f71af2918a78b392043460c325795"} Sep 30 12:34:36 crc kubenswrapper[5002]: I0930 12:34:36.111405 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/keystone-operator-controller-manager-5bd55b4bff-m7m4h" Sep 30 12:34:36 crc kubenswrapper[5002]: I0930 12:34:36.113001 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-7975b88857-r7t4w" event={"ID":"720e0316-9060-4bd3-804c-f98017a3fb84","Type":"ContainerStarted","Data":"43200f86be0fd2c23a4f21580126eee0f1e1f7987fcf2ce8c485535b9c297d5d"} Sep 30 12:34:36 crc kubenswrapper[5002]: I0930 12:34:36.113141 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/ironic-operator-controller-manager-7975b88857-r7t4w" Sep 30 12:34:36 crc kubenswrapper[5002]: I0930 12:34:36.115285 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-5d889d78cf-sjnlc" event={"ID":"63ee8874-1cbb-4183-b16a-f2efd8a1e7d6","Type":"ContainerStarted","Data":"cc8a57d507873998aa8849c3af48dd371e02bd4ae61a3f643f54af51b8a5a948"} Sep 30 12:34:36 crc kubenswrapper[5002]: I0930 12:34:36.115438 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/heat-operator-controller-manager-5d889d78cf-sjnlc" Sep 30 12:34:36 crc kubenswrapper[5002]: I0930 12:34:36.117442 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-9976ff44c-kl4qt" event={"ID":"2607efd2-d90d-4a1e-be6c-5f3c88da67e4","Type":"ContainerStarted","Data":"ccc7ab0380f953df32b8a962dd59719637727d3ab6c69b3f6d69134ff991bbda"} Sep 30 12:34:36 crc kubenswrapper[5002]: I0930 12:34:36.117610 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/ovn-operator-controller-manager-9976ff44c-kl4qt" Sep 30 12:34:36 crc kubenswrapper[5002]: I0930 12:34:36.119919 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-c7c776c96-gh66v" event={"ID":"2d9e02f5-4644-423a-a783-8dbc51d68570","Type":"ContainerStarted","Data":"fbd746f0b50728fba90389fc17464be1a88c1d7c22a1b3303e388cfbab1bd80d"} Sep 30 12:34:36 crc kubenswrapper[5002]: I0930 12:34:36.120090 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/nova-operator-controller-manager-c7c776c96-gh66v" Sep 30 12:34:36 crc kubenswrapper[5002]: I0930 12:34:36.122126 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-84697dfb4d-w86tm" event={"ID":"32ebad0d-e677-4e33-b555-44db1541becc","Type":"ContainerStarted","Data":"e4bd5031666ca7a2112606358c42b521ff6b46d28144c753e9a886359de90d80"} Sep 30 12:34:36 crc kubenswrapper[5002]: I0930 12:34:36.122156 
5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-84697dfb4d-w86tm" event={"ID":"32ebad0d-e677-4e33-b555-44db1541becc","Type":"ContainerStarted","Data":"fc2e29979b1245aec90d46509a0c1da449972d0b63d996dde5e767188197c65c"} Sep 30 12:34:36 crc kubenswrapper[5002]: I0930 12:34:36.122273 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-manager-84697dfb4d-w86tm" Sep 30 12:34:36 crc kubenswrapper[5002]: I0930 12:34:36.125831 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-c7j6g" event={"ID":"5d79f7da-8bfd-4f26-bcf1-b4ad36a6b42f","Type":"ContainerStarted","Data":"1a1d8b1bedf3ae897fbb4411ee74af02edb504c5a5c07f7c2a3d9865d1d303db"} Sep 30 12:34:36 crc kubenswrapper[5002]: I0930 12:34:36.125964 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-c7j6g" Sep 30 12:34:36 crc kubenswrapper[5002]: I0930 12:34:36.129810 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-6d68dbc695-zzt9s" event={"ID":"855868b4-991b-4f9f-b471-5b1244221192","Type":"ContainerStarted","Data":"b9bf2a1854e5e588f34b7935321c110a1aed465a7c54a418e1e411f04db36ae4"} Sep 30 12:34:36 crc kubenswrapper[5002]: I0930 12:34:36.130292 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/manila-operator-controller-manager-6d68dbc695-zzt9s" Sep 30 12:34:36 crc kubenswrapper[5002]: I0930 12:34:36.132886 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/designate-operator-controller-manager-84f4f7b77b-rvv9g" podStartSLOduration=4.754055888 podStartE2EDuration="14.132869482s" podCreationTimestamp="2025-09-30 12:34:22 +0000 UTC" firstStartedPulling="2025-09-30 12:34:24.011298225 +0000 UTC m=+838.260980371" lastFinishedPulling="2025-09-30 12:34:33.390111819 +0000 UTC m=+847.639793965" observedRunningTime="2025-09-30 12:34:36.129907582 +0000 UTC m=+850.379589798" watchObservedRunningTime="2025-09-30 12:34:36.132869482 +0000 UTC m=+850.382551628" Sep 30 12:34:36 crc kubenswrapper[5002]: I0930 12:34:36.135759 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-bc7dc7bd9-d9jtf" event={"ID":"90cc681e-c24d-4b64-862d-3514308e77c6","Type":"ContainerStarted","Data":"49bf18002842a6df20d39932a1f1f72e331bf05d5af7e573db569b5cc767c96e"} Sep 30 12:34:36 crc kubenswrapper[5002]: I0930 12:34:36.153048 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/ironic-operator-controller-manager-7975b88857-r7t4w" podStartSLOduration=5.212359652 podStartE2EDuration="14.153017474s" podCreationTimestamp="2025-09-30 12:34:22 +0000 UTC" firstStartedPulling="2025-09-30 12:34:24.449265382 +0000 UTC m=+838.698947528" lastFinishedPulling="2025-09-30 12:34:33.389923184 +0000 UTC m=+847.639605350" observedRunningTime="2025-09-30 12:34:36.14886242 +0000 UTC m=+850.398544596" watchObservedRunningTime="2025-09-30 12:34:36.153017474 +0000 UTC m=+850.402699660" Sep 30 12:34:36 crc kubenswrapper[5002]: I0930 12:34:36.174085 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/keystone-operator-controller-manager-5bd55b4bff-m7m4h" podStartSLOduration=4.322392854 
podStartE2EDuration="13.1740624s" podCreationTimestamp="2025-09-30 12:34:23 +0000 UTC" firstStartedPulling="2025-09-30 12:34:24.539237115 +0000 UTC m=+838.788919261" lastFinishedPulling="2025-09-30 12:34:33.390906661 +0000 UTC m=+847.640588807" observedRunningTime="2025-09-30 12:34:36.171918212 +0000 UTC m=+850.421600428" watchObservedRunningTime="2025-09-30 12:34:36.1740624 +0000 UTC m=+850.423744566" Sep 30 12:34:36 crc kubenswrapper[5002]: I0930 12:34:36.215010 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-c7j6g" podStartSLOduration=4.814429247 podStartE2EDuration="13.214979431s" podCreationTimestamp="2025-09-30 12:34:23 +0000 UTC" firstStartedPulling="2025-09-30 12:34:25.030544427 +0000 UTC m=+839.280226573" lastFinishedPulling="2025-09-30 12:34:33.431094611 +0000 UTC m=+847.680776757" observedRunningTime="2025-09-30 12:34:36.208708299 +0000 UTC m=+850.458390515" watchObservedRunningTime="2025-09-30 12:34:36.214979431 +0000 UTC m=+850.464661597" Sep 30 12:34:36 crc kubenswrapper[5002]: I0930 12:34:36.233794 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/manila-operator-controller-manager-6d68dbc695-zzt9s" podStartSLOduration=4.347598323 podStartE2EDuration="13.233767885s" podCreationTimestamp="2025-09-30 12:34:23 +0000 UTC" firstStartedPulling="2025-09-30 12:34:24.535926003 +0000 UTC m=+838.785608149" lastFinishedPulling="2025-09-30 12:34:33.422095565 +0000 UTC m=+847.671777711" observedRunningTime="2025-09-30 12:34:36.233011115 +0000 UTC m=+850.482693291" watchObservedRunningTime="2025-09-30 12:34:36.233767885 +0000 UTC m=+850.483450071" Sep 30 12:34:36 crc kubenswrapper[5002]: I0930 12:34:36.254346 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/nova-operator-controller-manager-c7c776c96-gh66v" podStartSLOduration=4.34475531 podStartE2EDuration="13.254318628s" podCreationTimestamp="2025-09-30 12:34:23 +0000 UTC" firstStartedPulling="2025-09-30 12:34:24.521542363 +0000 UTC m=+838.771224509" lastFinishedPulling="2025-09-30 12:34:33.431105681 +0000 UTC m=+847.680787827" observedRunningTime="2025-09-30 12:34:36.252081156 +0000 UTC m=+850.501763312" watchObservedRunningTime="2025-09-30 12:34:36.254318628 +0000 UTC m=+850.504000814" Sep 30 12:34:36 crc kubenswrapper[5002]: I0930 12:34:36.283605 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-controller-manager-84697dfb4d-w86tm" podStartSLOduration=13.283582679 podStartE2EDuration="13.283582679s" podCreationTimestamp="2025-09-30 12:34:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 12:34:36.276378222 +0000 UTC m=+850.526060388" watchObservedRunningTime="2025-09-30 12:34:36.283582679 +0000 UTC m=+850.533264825" Sep 30 12:34:36 crc kubenswrapper[5002]: I0930 12:34:36.302805 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/heat-operator-controller-manager-5d889d78cf-sjnlc" podStartSLOduration=5.376340867 podStartE2EDuration="14.302782204s" podCreationTimestamp="2025-09-30 12:34:22 +0000 UTC" firstStartedPulling="2025-09-30 12:34:24.464256139 +0000 UTC m=+838.713938285" lastFinishedPulling="2025-09-30 12:34:33.390697446 +0000 UTC m=+847.640379622" observedRunningTime="2025-09-30 12:34:36.297218242 +0000 UTC 
m=+850.546900448" watchObservedRunningTime="2025-09-30 12:34:36.302782204 +0000 UTC m=+850.552464350" Sep 30 12:34:36 crc kubenswrapper[5002]: I0930 12:34:36.312285 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/ovn-operator-controller-manager-9976ff44c-kl4qt" podStartSLOduration=4.858350916 podStartE2EDuration="13.312271464s" podCreationTimestamp="2025-09-30 12:34:23 +0000 UTC" firstStartedPulling="2025-09-30 12:34:24.936409207 +0000 UTC m=+839.186091353" lastFinishedPulling="2025-09-30 12:34:33.390329755 +0000 UTC m=+847.640011901" observedRunningTime="2025-09-30 12:34:36.311175304 +0000 UTC m=+850.560857470" watchObservedRunningTime="2025-09-30 12:34:36.312271464 +0000 UTC m=+850.561953610" Sep 30 12:34:41 crc kubenswrapper[5002]: I0930 12:34:41.176540 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-76fcc6dc7c-xwlmz" event={"ID":"38eab40a-26bf-4c1b-8911-4d6672629e3e","Type":"ContainerStarted","Data":"cd343e8f3fb53f62d42359bd5c7eba78afc2554dfa33f902749411df3ab4bc48"} Sep 30 12:34:41 crc kubenswrapper[5002]: I0930 12:34:41.178032 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/octavia-operator-controller-manager-76fcc6dc7c-xwlmz" Sep 30 12:34:41 crc kubenswrapper[5002]: I0930 12:34:41.196548 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/octavia-operator-controller-manager-76fcc6dc7c-xwlmz" podStartSLOduration=2.9485297470000003 podStartE2EDuration="18.19652999s" podCreationTimestamp="2025-09-30 12:34:23 +0000 UTC" firstStartedPulling="2025-09-30 12:34:24.977565483 +0000 UTC m=+839.227247629" lastFinishedPulling="2025-09-30 12:34:40.225565716 +0000 UTC m=+854.475247872" observedRunningTime="2025-09-30 12:34:41.192607822 +0000 UTC m=+855.442289988" watchObservedRunningTime="2025-09-30 12:34:41.19652999 +0000 UTC m=+855.446212136" Sep 30 12:34:43 crc kubenswrapper[5002]: I0930 12:34:43.179521 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/barbican-operator-controller-manager-6ff8b75857-mf4jp" Sep 30 12:34:43 crc kubenswrapper[5002]: I0930 12:34:43.195851 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/cinder-operator-controller-manager-644bddb6d8-ph95z" Sep 30 12:34:43 crc kubenswrapper[5002]: I0930 12:34:43.221971 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/designate-operator-controller-manager-84f4f7b77b-rvv9g" Sep 30 12:34:43 crc kubenswrapper[5002]: I0930 12:34:43.310450 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/glance-operator-controller-manager-84958c4d49-chbd7" Sep 30 12:34:43 crc kubenswrapper[5002]: I0930 12:34:43.388661 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/heat-operator-controller-manager-5d889d78cf-sjnlc" Sep 30 12:34:43 crc kubenswrapper[5002]: I0930 12:34:43.392982 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/horizon-operator-controller-manager-9f4696d94-42j7m" Sep 30 12:34:43 crc kubenswrapper[5002]: I0930 12:34:43.454128 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/ironic-operator-controller-manager-7975b88857-r7t4w" Sep 30 12:34:43 crc kubenswrapper[5002]: I0930 12:34:43.528486 5002 
kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/keystone-operator-controller-manager-5bd55b4bff-m7m4h" Sep 30 12:34:43 crc kubenswrapper[5002]: I0930 12:34:43.552711 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/placement-operator-controller-manager-589c58c6c-57btn" Sep 30 12:34:43 crc kubenswrapper[5002]: I0930 12:34:43.615054 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/manila-operator-controller-manager-6d68dbc695-zzt9s" Sep 30 12:34:43 crc kubenswrapper[5002]: I0930 12:34:43.694102 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/mariadb-operator-controller-manager-88c7-sxv7p" Sep 30 12:34:43 crc kubenswrapper[5002]: I0930 12:34:43.711950 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/nova-operator-controller-manager-c7c776c96-gh66v" Sep 30 12:34:43 crc kubenswrapper[5002]: I0930 12:34:43.843424 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/ovn-operator-controller-manager-9976ff44c-kl4qt" Sep 30 12:34:43 crc kubenswrapper[5002]: I0930 12:34:43.889411 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/swift-operator-controller-manager-bc7dc7bd9-d9jtf" Sep 30 12:34:43 crc kubenswrapper[5002]: I0930 12:34:43.993695 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/watcher-operator-controller-manager-76669f99c-n44h7" Sep 30 12:34:44 crc kubenswrapper[5002]: I0930 12:34:44.136066 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-c7j6g" Sep 30 12:34:45 crc kubenswrapper[5002]: I0930 12:34:45.208221 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-7d857cc749-qcq76" event={"ID":"7c75d96a-d0a7-4f12-9799-4d01ee215248","Type":"ContainerStarted","Data":"83f57c529e5d1579c656db2f4fb078fffa8c50ac65f24b24e0a9dfd02dcf205c"} Sep 30 12:34:45 crc kubenswrapper[5002]: I0930 12:34:45.208844 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/infra-operator-controller-manager-7d857cc749-qcq76" Sep 30 12:34:45 crc kubenswrapper[5002]: I0930 12:34:45.211178 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-b8d54b5d7-7662l" event={"ID":"78f98b71-27e2-411c-b610-8b4be1068d5a","Type":"ContainerStarted","Data":"4da37b00414c479a22781996327c691e82cf209ba372481d79d5a6bf53654015"} Sep 30 12:34:45 crc kubenswrapper[5002]: I0930 12:34:45.211348 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/telemetry-operator-controller-manager-b8d54b5d7-7662l" Sep 30 12:34:45 crc kubenswrapper[5002]: I0930 12:34:45.212980 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-79d8469568-h9dzh" event={"ID":"b4044bb7-6e63-45d0-9640-7966bcd23aa9","Type":"ContainerStarted","Data":"68794d8ac19d3f43586c49e323c6192bbe18e0e1032023dd95228fc55cb7780f"} Sep 30 12:34:45 crc kubenswrapper[5002]: I0930 12:34:45.227845 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/infra-operator-controller-manager-7d857cc749-qcq76" 
podStartSLOduration=3.871591374 podStartE2EDuration="23.227825422s" podCreationTimestamp="2025-09-30 12:34:22 +0000 UTC" firstStartedPulling="2025-09-30 12:34:25.007000642 +0000 UTC m=+839.256682798" lastFinishedPulling="2025-09-30 12:34:44.3632347 +0000 UTC m=+858.612916846" observedRunningTime="2025-09-30 12:34:45.226016282 +0000 UTC m=+859.475698458" watchObservedRunningTime="2025-09-30 12:34:45.227825422 +0000 UTC m=+859.477507578" Sep 30 12:34:45 crc kubenswrapper[5002]: I0930 12:34:45.247226 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/telemetry-operator-controller-manager-b8d54b5d7-7662l" podStartSLOduration=2.889814773 podStartE2EDuration="22.247206093s" podCreationTimestamp="2025-09-30 12:34:23 +0000 UTC" firstStartedPulling="2025-09-30 12:34:25.006971441 +0000 UTC m=+839.256653587" lastFinishedPulling="2025-09-30 12:34:44.364362761 +0000 UTC m=+858.614044907" observedRunningTime="2025-09-30 12:34:45.240338414 +0000 UTC m=+859.490020580" watchObservedRunningTime="2025-09-30 12:34:45.247206093 +0000 UTC m=+859.496888249" Sep 30 12:34:45 crc kubenswrapper[5002]: I0930 12:34:45.261886 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/rabbitmq-cluster-operator-manager-79d8469568-h9dzh" podStartSLOduration=2.937363225 podStartE2EDuration="22.261860353s" podCreationTimestamp="2025-09-30 12:34:23 +0000 UTC" firstStartedPulling="2025-09-30 12:34:25.05545943 +0000 UTC m=+839.305141576" lastFinishedPulling="2025-09-30 12:34:44.379956558 +0000 UTC m=+858.629638704" observedRunningTime="2025-09-30 12:34:45.258850231 +0000 UTC m=+859.508532397" watchObservedRunningTime="2025-09-30 12:34:45.261860353 +0000 UTC m=+859.511542509" Sep 30 12:34:45 crc kubenswrapper[5002]: I0930 12:34:45.583543 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-controller-manager-84697dfb4d-w86tm" Sep 30 12:34:47 crc kubenswrapper[5002]: I0930 12:34:47.227737 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-64d7b59854-hcr66" event={"ID":"6413b1d0-7f0f-4bca-88e9-90a9d78bff9c","Type":"ContainerStarted","Data":"86af246396326786754963a5bdb5b1c75c298d66d3ebaa7788b76d7fd5e5cf23"} Sep 30 12:34:47 crc kubenswrapper[5002]: I0930 12:34:47.227996 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/neutron-operator-controller-manager-64d7b59854-hcr66" Sep 30 12:34:47 crc kubenswrapper[5002]: I0930 12:34:47.229120 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-f66b554c6-z2dcj" event={"ID":"0fa1c573-cbf9-43f1-8106-7cf73e93f1f3","Type":"ContainerStarted","Data":"2e486c38071fbd6a0c6442259f2a058f2f4e64d24d520e2d29af155f00ff481c"} Sep 30 12:34:47 crc kubenswrapper[5002]: I0930 12:34:47.229457 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/test-operator-controller-manager-f66b554c6-z2dcj" Sep 30 12:34:47 crc kubenswrapper[5002]: I0930 12:34:47.254777 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/neutron-operator-controller-manager-64d7b59854-hcr66" podStartSLOduration=3.189531643 podStartE2EDuration="24.254761216s" podCreationTimestamp="2025-09-30 12:34:23 +0000 UTC" firstStartedPulling="2025-09-30 12:34:24.997626821 +0000 UTC m=+839.247308967" lastFinishedPulling="2025-09-30 12:34:46.062856374 +0000 
UTC m=+860.312538540" observedRunningTime="2025-09-30 12:34:47.25013321 +0000 UTC m=+861.499815356" watchObservedRunningTime="2025-09-30 12:34:47.254761216 +0000 UTC m=+861.504443362" Sep 30 12:34:47 crc kubenswrapper[5002]: I0930 12:34:47.269186 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/test-operator-controller-manager-f66b554c6-z2dcj" podStartSLOduration=3.211905081 podStartE2EDuration="24.269170171s" podCreationTimestamp="2025-09-30 12:34:23 +0000 UTC" firstStartedPulling="2025-09-30 12:34:25.006960141 +0000 UTC m=+839.256642287" lastFinishedPulling="2025-09-30 12:34:46.064225221 +0000 UTC m=+860.313907377" observedRunningTime="2025-09-30 12:34:47.264223695 +0000 UTC m=+861.513905861" watchObservedRunningTime="2025-09-30 12:34:47.269170171 +0000 UTC m=+861.518852317" Sep 30 12:34:53 crc kubenswrapper[5002]: I0930 12:34:53.692750 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/neutron-operator-controller-manager-64d7b59854-hcr66" Sep 30 12:34:53 crc kubenswrapper[5002]: I0930 12:34:53.763111 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/octavia-operator-controller-manager-76fcc6dc7c-xwlmz" Sep 30 12:34:53 crc kubenswrapper[5002]: I0930 12:34:53.900884 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/telemetry-operator-controller-manager-b8d54b5d7-7662l" Sep 30 12:34:53 crc kubenswrapper[5002]: I0930 12:34:53.944467 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/test-operator-controller-manager-f66b554c6-z2dcj" Sep 30 12:34:54 crc kubenswrapper[5002]: I0930 12:34:54.013266 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/infra-operator-controller-manager-7d857cc749-qcq76" Sep 30 12:35:02 crc kubenswrapper[5002]: I0930 12:35:02.098555 5002 patch_prober.go:28] interesting pod/machine-config-daemon-ncbb5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 12:35:02 crc kubenswrapper[5002]: I0930 12:35:02.099138 5002 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" podUID="341a55c6-78d3-4fa2-8f47-b56fd41fa1c1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 12:35:10 crc kubenswrapper[5002]: I0930 12:35:10.501800 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-gpdm5"] Sep 30 12:35:10 crc kubenswrapper[5002]: I0930 12:35:10.508329 5002 util.go:30] "No sandbox for pod can be found. 
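Editor's note: the two entries above show the kubelet's HTTP liveness probe against http://127.0.0.1:8798/health failing with connection refused. A minimal Go sketch of the same check semantics follows, assuming the usual probe rule that a transport error or a status outside 200-399 counts as failure; it mirrors the probe's behavior, not the actual code in patch_prober.go/prober.go.

package main

import (
	"fmt"
	"net/http"
	"time"
)

// probeOnce performs one HTTP GET in the style of a liveness probe.
// A connection error (the "connect: connection refused" above) or a
// status outside 200-399 is reported as a probe failure.
func probeOnce(url string) error {
	client := &http.Client{Timeout: time.Second}
	resp, err := client.Get(url)
	if err != nil {
		return fmt.Errorf("probe failed: %w", err)
	}
	defer resp.Body.Close()
	if resp.StatusCode < 200 || resp.StatusCode >= 400 {
		return fmt.Errorf("probe failed: status %d", resp.StatusCode)
	}
	return nil
}

func main() {
	if err := probeOnce("http://127.0.0.1:8798/health"); err != nil {
		fmt.Println(err) // e.g. dial tcp 127.0.0.1:8798: connect: connection refused
	}
}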
Sep 30 12:35:10 crc kubenswrapper[5002]: I0930 12:35:10.501800 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-gpdm5"]
Sep 30 12:35:10 crc kubenswrapper[5002]: I0930 12:35:10.508329 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-gpdm5"
Sep 30 12:35:10 crc kubenswrapper[5002]: I0930 12:35:10.511313 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dnsmasq-dns-dockercfg-jv8j4"
Sep 30 12:35:10 crc kubenswrapper[5002]: I0930 12:35:10.511593 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"kube-root-ca.crt"
Sep 30 12:35:10 crc kubenswrapper[5002]: I0930 12:35:10.511704 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openshift-service-ca.crt"
Sep 30 12:35:10 crc kubenswrapper[5002]: I0930 12:35:10.511825 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns"
Sep 30 12:35:10 crc kubenswrapper[5002]: I0930 12:35:10.512299 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-gpdm5"]
Sep 30 12:35:10 crc kubenswrapper[5002]: I0930 12:35:10.551132 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-t7dh5"]
Sep 30 12:35:10 crc kubenswrapper[5002]: I0930 12:35:10.552621 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-t7dh5"
Sep 30 12:35:10 crc kubenswrapper[5002]: I0930 12:35:10.555000 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns-svc"
Sep 30 12:35:10 crc kubenswrapper[5002]: I0930 12:35:10.556395 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-t7dh5"]
Sep 30 12:35:10 crc kubenswrapper[5002]: I0930 12:35:10.638524 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/386f8b04-affa-45f6-b394-f4f83cda6cb7-dns-svc\") pod \"dnsmasq-dns-78dd6ddcc-t7dh5\" (UID: \"386f8b04-affa-45f6-b394-f4f83cda6cb7\") " pod="openstack/dnsmasq-dns-78dd6ddcc-t7dh5"
Sep 30 12:35:10 crc kubenswrapper[5002]: I0930 12:35:10.638583 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/386f8b04-affa-45f6-b394-f4f83cda6cb7-config\") pod \"dnsmasq-dns-78dd6ddcc-t7dh5\" (UID: \"386f8b04-affa-45f6-b394-f4f83cda6cb7\") " pod="openstack/dnsmasq-dns-78dd6ddcc-t7dh5"
Sep 30 12:35:10 crc kubenswrapper[5002]: I0930 12:35:10.638645 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ssb9m\" (UniqueName: \"kubernetes.io/projected/9e945411-e51f-48af-ac88-de0fff99c16e-kube-api-access-ssb9m\") pod \"dnsmasq-dns-675f4bcbfc-gpdm5\" (UID: \"9e945411-e51f-48af-ac88-de0fff99c16e\") " pod="openstack/dnsmasq-dns-675f4bcbfc-gpdm5"
Sep 30 12:35:10 crc kubenswrapper[5002]: I0930 12:35:10.638671 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n5xjq\" (UniqueName: \"kubernetes.io/projected/386f8b04-affa-45f6-b394-f4f83cda6cb7-kube-api-access-n5xjq\") pod \"dnsmasq-dns-78dd6ddcc-t7dh5\" (UID: \"386f8b04-affa-45f6-b394-f4f83cda6cb7\") " pod="openstack/dnsmasq-dns-78dd6ddcc-t7dh5"
Sep 30 12:35:10 crc kubenswrapper[5002]: I0930 12:35:10.638734 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9e945411-e51f-48af-ac88-de0fff99c16e-config\") pod \"dnsmasq-dns-675f4bcbfc-gpdm5\" (UID: \"9e945411-e51f-48af-ac88-de0fff99c16e\") " pod="openstack/dnsmasq-dns-675f4bcbfc-gpdm5"
Sep 30 12:35:10 crc kubenswrapper[5002]: I0930 12:35:10.740546 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n5xjq\" (UniqueName: \"kubernetes.io/projected/386f8b04-affa-45f6-b394-f4f83cda6cb7-kube-api-access-n5xjq\") pod \"dnsmasq-dns-78dd6ddcc-t7dh5\" (UID: \"386f8b04-affa-45f6-b394-f4f83cda6cb7\") " pod="openstack/dnsmasq-dns-78dd6ddcc-t7dh5"
Sep 30 12:35:10 crc kubenswrapper[5002]: I0930 12:35:10.740755 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9e945411-e51f-48af-ac88-de0fff99c16e-config\") pod \"dnsmasq-dns-675f4bcbfc-gpdm5\" (UID: \"9e945411-e51f-48af-ac88-de0fff99c16e\") " pod="openstack/dnsmasq-dns-675f4bcbfc-gpdm5"
Sep 30 12:35:10 crc kubenswrapper[5002]: I0930 12:35:10.740780 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/386f8b04-affa-45f6-b394-f4f83cda6cb7-dns-svc\") pod \"dnsmasq-dns-78dd6ddcc-t7dh5\" (UID: \"386f8b04-affa-45f6-b394-f4f83cda6cb7\") " pod="openstack/dnsmasq-dns-78dd6ddcc-t7dh5"
Sep 30 12:35:10 crc kubenswrapper[5002]: I0930 12:35:10.740817 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/386f8b04-affa-45f6-b394-f4f83cda6cb7-config\") pod \"dnsmasq-dns-78dd6ddcc-t7dh5\" (UID: \"386f8b04-affa-45f6-b394-f4f83cda6cb7\") " pod="openstack/dnsmasq-dns-78dd6ddcc-t7dh5"
Sep 30 12:35:10 crc kubenswrapper[5002]: I0930 12:35:10.741805 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/386f8b04-affa-45f6-b394-f4f83cda6cb7-dns-svc\") pod \"dnsmasq-dns-78dd6ddcc-t7dh5\" (UID: \"386f8b04-affa-45f6-b394-f4f83cda6cb7\") " pod="openstack/dnsmasq-dns-78dd6ddcc-t7dh5"
Sep 30 12:35:10 crc kubenswrapper[5002]: I0930 12:35:10.741855 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9e945411-e51f-48af-ac88-de0fff99c16e-config\") pod \"dnsmasq-dns-675f4bcbfc-gpdm5\" (UID: \"9e945411-e51f-48af-ac88-de0fff99c16e\") " pod="openstack/dnsmasq-dns-675f4bcbfc-gpdm5"
Sep 30 12:35:10 crc kubenswrapper[5002]: I0930 12:35:10.742353 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/386f8b04-affa-45f6-b394-f4f83cda6cb7-config\") pod \"dnsmasq-dns-78dd6ddcc-t7dh5\" (UID: \"386f8b04-affa-45f6-b394-f4f83cda6cb7\") " pod="openstack/dnsmasq-dns-78dd6ddcc-t7dh5"
Sep 30 12:35:10 crc kubenswrapper[5002]: I0930 12:35:10.742801 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ssb9m\" (UniqueName: \"kubernetes.io/projected/9e945411-e51f-48af-ac88-de0fff99c16e-kube-api-access-ssb9m\") pod \"dnsmasq-dns-675f4bcbfc-gpdm5\" (UID: \"9e945411-e51f-48af-ac88-de0fff99c16e\") " pod="openstack/dnsmasq-dns-675f4bcbfc-gpdm5"
Sep 30 12:35:10 crc kubenswrapper[5002]: I0930 12:35:10.761059 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ssb9m\" (UniqueName: \"kubernetes.io/projected/9e945411-e51f-48af-ac88-de0fff99c16e-kube-api-access-ssb9m\") pod \"dnsmasq-dns-675f4bcbfc-gpdm5\" (UID: \"9e945411-e51f-48af-ac88-de0fff99c16e\") " pod="openstack/dnsmasq-dns-675f4bcbfc-gpdm5"
Sep 30 12:35:10 crc kubenswrapper[5002]: I0930 12:35:10.765520 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n5xjq\" (UniqueName: \"kubernetes.io/projected/386f8b04-affa-45f6-b394-f4f83cda6cb7-kube-api-access-n5xjq\") pod \"dnsmasq-dns-78dd6ddcc-t7dh5\" (UID: \"386f8b04-affa-45f6-b394-f4f83cda6cb7\") " pod="openstack/dnsmasq-dns-78dd6ddcc-t7dh5"
Sep 30 12:35:10 crc kubenswrapper[5002]: I0930 12:35:10.839830 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-gpdm5"
Sep 30 12:35:10 crc kubenswrapper[5002]: I0930 12:35:10.869802 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-t7dh5"
Sep 30 12:35:11 crc kubenswrapper[5002]: I0930 12:35:11.294393 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-gpdm5"]
Sep 30 12:35:11 crc kubenswrapper[5002]: I0930 12:35:11.357670 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-t7dh5"]
Sep 30 12:35:11 crc kubenswrapper[5002]: I0930 12:35:11.416095 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-675f4bcbfc-gpdm5" event={"ID":"9e945411-e51f-48af-ac88-de0fff99c16e","Type":"ContainerStarted","Data":"cb036afd39ec1a862a110f81d0a42c4cd8e825156b761693bf3bf725b037d4ff"}
Sep 30 12:35:11 crc kubenswrapper[5002]: I0930 12:35:11.417279 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-78dd6ddcc-t7dh5" event={"ID":"386f8b04-affa-45f6-b394-f4f83cda6cb7","Type":"ContainerStarted","Data":"391f4446362fde8f5b95b1845b96f8642f1c0102eef38640f9f67b2907046869"}
Sep 30 12:35:13 crc kubenswrapper[5002]: I0930 12:35:13.453434 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-gpdm5"]
Sep 30 12:35:13 crc kubenswrapper[5002]: I0930 12:35:13.476494 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5ccc8479f9-dkhh2"]
Sep 30 12:35:13 crc kubenswrapper[5002]: I0930 12:35:13.477734 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5ccc8479f9-dkhh2"
Sep 30 12:35:13 crc kubenswrapper[5002]: I0930 12:35:13.502944 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5ccc8479f9-dkhh2"]
Sep 30 12:35:13 crc kubenswrapper[5002]: I0930 12:35:13.599263 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/dde3e2f9-15fd-4f59-8570-02fa37c770e0-dns-svc\") pod \"dnsmasq-dns-5ccc8479f9-dkhh2\" (UID: \"dde3e2f9-15fd-4f59-8570-02fa37c770e0\") " pod="openstack/dnsmasq-dns-5ccc8479f9-dkhh2"
Sep 30 12:35:13 crc kubenswrapper[5002]: I0930 12:35:13.599354 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6pgcs\" (UniqueName: \"kubernetes.io/projected/dde3e2f9-15fd-4f59-8570-02fa37c770e0-kube-api-access-6pgcs\") pod \"dnsmasq-dns-5ccc8479f9-dkhh2\" (UID: \"dde3e2f9-15fd-4f59-8570-02fa37c770e0\") " pod="openstack/dnsmasq-dns-5ccc8479f9-dkhh2"
Sep 30 12:35:13 crc kubenswrapper[5002]: I0930 12:35:13.599392 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dde3e2f9-15fd-4f59-8570-02fa37c770e0-config\") pod \"dnsmasq-dns-5ccc8479f9-dkhh2\" (UID: \"dde3e2f9-15fd-4f59-8570-02fa37c770e0\") " pod="openstack/dnsmasq-dns-5ccc8479f9-dkhh2"
Sep 30 12:35:13 crc kubenswrapper[5002]: I0930 12:35:13.700936 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/dde3e2f9-15fd-4f59-8570-02fa37c770e0-dns-svc\") pod \"dnsmasq-dns-5ccc8479f9-dkhh2\" (UID: \"dde3e2f9-15fd-4f59-8570-02fa37c770e0\") " pod="openstack/dnsmasq-dns-5ccc8479f9-dkhh2"
Sep 30 12:35:13 crc kubenswrapper[5002]: I0930 12:35:13.701010 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6pgcs\" (UniqueName: \"kubernetes.io/projected/dde3e2f9-15fd-4f59-8570-02fa37c770e0-kube-api-access-6pgcs\") pod \"dnsmasq-dns-5ccc8479f9-dkhh2\" (UID: \"dde3e2f9-15fd-4f59-8570-02fa37c770e0\") " pod="openstack/dnsmasq-dns-5ccc8479f9-dkhh2"
Sep 30 12:35:13 crc kubenswrapper[5002]: I0930 12:35:13.701032 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dde3e2f9-15fd-4f59-8570-02fa37c770e0-config\") pod \"dnsmasq-dns-5ccc8479f9-dkhh2\" (UID: \"dde3e2f9-15fd-4f59-8570-02fa37c770e0\") " pod="openstack/dnsmasq-dns-5ccc8479f9-dkhh2"
Sep 30 12:35:13 crc kubenswrapper[5002]: I0930 12:35:13.701875 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dde3e2f9-15fd-4f59-8570-02fa37c770e0-config\") pod \"dnsmasq-dns-5ccc8479f9-dkhh2\" (UID: \"dde3e2f9-15fd-4f59-8570-02fa37c770e0\") " pod="openstack/dnsmasq-dns-5ccc8479f9-dkhh2"
Sep 30 12:35:13 crc kubenswrapper[5002]: I0930 12:35:13.702796 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/dde3e2f9-15fd-4f59-8570-02fa37c770e0-dns-svc\") pod \"dnsmasq-dns-5ccc8479f9-dkhh2\" (UID: \"dde3e2f9-15fd-4f59-8570-02fa37c770e0\") " pod="openstack/dnsmasq-dns-5ccc8479f9-dkhh2"
Sep 30 12:35:13 crc kubenswrapper[5002]: I0930 12:35:13.718762 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-t7dh5"]
Sep 30 12:35:13 crc kubenswrapper[5002]: I0930 12:35:13.751631 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6pgcs\" (UniqueName: \"kubernetes.io/projected/dde3e2f9-15fd-4f59-8570-02fa37c770e0-kube-api-access-6pgcs\") pod \"dnsmasq-dns-5ccc8479f9-dkhh2\" (UID: \"dde3e2f9-15fd-4f59-8570-02fa37c770e0\") " pod="openstack/dnsmasq-dns-5ccc8479f9-dkhh2"
Sep 30 12:35:13 crc kubenswrapper[5002]: I0930 12:35:13.758234 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-mp9f9"]
Sep 30 12:35:13 crc kubenswrapper[5002]: I0930 12:35:13.759713 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-mp9f9"
Sep 30 12:35:13 crc kubenswrapper[5002]: I0930 12:35:13.776521 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-mp9f9"]
Sep 30 12:35:13 crc kubenswrapper[5002]: I0930 12:35:13.808935 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5ccc8479f9-dkhh2"
Sep 30 12:35:13 crc kubenswrapper[5002]: I0930 12:35:13.903265 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f69d7010-1d73-410f-a5f1-5aed01494f42-dns-svc\") pod \"dnsmasq-dns-57d769cc4f-mp9f9\" (UID: \"f69d7010-1d73-410f-a5f1-5aed01494f42\") " pod="openstack/dnsmasq-dns-57d769cc4f-mp9f9"
Sep 30 12:35:13 crc kubenswrapper[5002]: I0930 12:35:13.903320 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jwl62\" (UniqueName: \"kubernetes.io/projected/f69d7010-1d73-410f-a5f1-5aed01494f42-kube-api-access-jwl62\") pod \"dnsmasq-dns-57d769cc4f-mp9f9\" (UID: \"f69d7010-1d73-410f-a5f1-5aed01494f42\") " pod="openstack/dnsmasq-dns-57d769cc4f-mp9f9"
Sep 30 12:35:13 crc kubenswrapper[5002]: I0930 12:35:13.903356 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f69d7010-1d73-410f-a5f1-5aed01494f42-config\") pod \"dnsmasq-dns-57d769cc4f-mp9f9\" (UID: \"f69d7010-1d73-410f-a5f1-5aed01494f42\") " pod="openstack/dnsmasq-dns-57d769cc4f-mp9f9"
Sep 30 12:35:14 crc kubenswrapper[5002]: I0930 12:35:14.004110 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f69d7010-1d73-410f-a5f1-5aed01494f42-config\") pod \"dnsmasq-dns-57d769cc4f-mp9f9\" (UID: \"f69d7010-1d73-410f-a5f1-5aed01494f42\") " pod="openstack/dnsmasq-dns-57d769cc4f-mp9f9"
Sep 30 12:35:14 crc kubenswrapper[5002]: I0930 12:35:14.004225 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f69d7010-1d73-410f-a5f1-5aed01494f42-dns-svc\") pod \"dnsmasq-dns-57d769cc4f-mp9f9\" (UID: \"f69d7010-1d73-410f-a5f1-5aed01494f42\") " pod="openstack/dnsmasq-dns-57d769cc4f-mp9f9"
Sep 30 12:35:14 crc kubenswrapper[5002]: I0930 12:35:14.004259 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jwl62\" (UniqueName: \"kubernetes.io/projected/f69d7010-1d73-410f-a5f1-5aed01494f42-kube-api-access-jwl62\") pod \"dnsmasq-dns-57d769cc4f-mp9f9\" (UID: \"f69d7010-1d73-410f-a5f1-5aed01494f42\") " pod="openstack/dnsmasq-dns-57d769cc4f-mp9f9"
Sep 30 12:35:14 crc kubenswrapper[5002]: I0930 12:35:14.006021 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f69d7010-1d73-410f-a5f1-5aed01494f42-dns-svc\") pod \"dnsmasq-dns-57d769cc4f-mp9f9\" (UID: \"f69d7010-1d73-410f-a5f1-5aed01494f42\") " pod="openstack/dnsmasq-dns-57d769cc4f-mp9f9"
Sep 30 12:35:14 crc kubenswrapper[5002]: I0930 12:35:14.006447 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f69d7010-1d73-410f-a5f1-5aed01494f42-config\") pod \"dnsmasq-dns-57d769cc4f-mp9f9\" (UID: \"f69d7010-1d73-410f-a5f1-5aed01494f42\") " pod="openstack/dnsmasq-dns-57d769cc4f-mp9f9"
Sep 30 12:35:14 crc kubenswrapper[5002]: I0930 12:35:14.031573 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jwl62\" (UniqueName: \"kubernetes.io/projected/f69d7010-1d73-410f-a5f1-5aed01494f42-kube-api-access-jwl62\") pod \"dnsmasq-dns-57d769cc4f-mp9f9\" (UID: \"f69d7010-1d73-410f-a5f1-5aed01494f42\") " pod="openstack/dnsmasq-dns-57d769cc4f-mp9f9"
Sep 30 12:35:14 crc kubenswrapper[5002]: I0930 12:35:14.080618 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-mp9f9"
Sep 30 12:35:14 crc kubenswrapper[5002]: I0930 12:35:14.268110 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5ccc8479f9-dkhh2"]
Sep 30 12:35:14 crc kubenswrapper[5002]: I0930 12:35:14.535725 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-mp9f9"]
Sep 30 12:35:14 crc kubenswrapper[5002]: I0930 12:35:14.616848 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-cell1-server-0"]
Sep 30 12:35:14 crc kubenswrapper[5002]: I0930 12:35:14.618379 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0"
Sep 30 12:35:14 crc kubenswrapper[5002]: I0930 12:35:14.621924 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-default-user"
Sep 30 12:35:14 crc kubenswrapper[5002]: I0930 12:35:14.621925 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-server-dockercfg-s489v"
Sep 30 12:35:14 crc kubenswrapper[5002]: I0930 12:35:14.622083 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-server-conf"
Sep 30 12:35:14 crc kubenswrapper[5002]: I0930 12:35:14.622087 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-cell1-svc"
Sep 30 12:35:14 crc kubenswrapper[5002]: I0930 12:35:14.622155 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-erlang-cookie"
Sep 30 12:35:14 crc kubenswrapper[5002]: I0930 12:35:14.622491 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-plugins-conf"
Sep 30 12:35:14 crc kubenswrapper[5002]: I0930 12:35:14.627744 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-config-data"
Sep 30 12:35:14 crc kubenswrapper[5002]: I0930 12:35:14.634821 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"]
\"26e7b906-c14a-4084-926c-2d2c7ce201be\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 12:35:14 crc kubenswrapper[5002]: I0930 12:35:14.713651 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/26e7b906-c14a-4084-926c-2d2c7ce201be-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"26e7b906-c14a-4084-926c-2d2c7ce201be\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 12:35:14 crc kubenswrapper[5002]: I0930 12:35:14.713678 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/26e7b906-c14a-4084-926c-2d2c7ce201be-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"26e7b906-c14a-4084-926c-2d2c7ce201be\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 12:35:14 crc kubenswrapper[5002]: I0930 12:35:14.713701 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/26e7b906-c14a-4084-926c-2d2c7ce201be-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"26e7b906-c14a-4084-926c-2d2c7ce201be\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 12:35:14 crc kubenswrapper[5002]: I0930 12:35:14.713736 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cfl92\" (UniqueName: \"kubernetes.io/projected/26e7b906-c14a-4084-926c-2d2c7ce201be-kube-api-access-cfl92\") pod \"rabbitmq-cell1-server-0\" (UID: \"26e7b906-c14a-4084-926c-2d2c7ce201be\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 12:35:14 crc kubenswrapper[5002]: I0930 12:35:14.713761 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/26e7b906-c14a-4084-926c-2d2c7ce201be-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"26e7b906-c14a-4084-926c-2d2c7ce201be\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 12:35:14 crc kubenswrapper[5002]: I0930 12:35:14.713824 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"26e7b906-c14a-4084-926c-2d2c7ce201be\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 12:35:14 crc kubenswrapper[5002]: I0930 12:35:14.713856 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/26e7b906-c14a-4084-926c-2d2c7ce201be-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"26e7b906-c14a-4084-926c-2d2c7ce201be\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 12:35:14 crc kubenswrapper[5002]: I0930 12:35:14.713896 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/26e7b906-c14a-4084-926c-2d2c7ce201be-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"26e7b906-c14a-4084-926c-2d2c7ce201be\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 12:35:14 crc kubenswrapper[5002]: I0930 12:35:14.713926 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/26e7b906-c14a-4084-926c-2d2c7ce201be-rabbitmq-tls\") pod 
\"rabbitmq-cell1-server-0\" (UID: \"26e7b906-c14a-4084-926c-2d2c7ce201be\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 12:35:14 crc kubenswrapper[5002]: I0930 12:35:14.713958 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/26e7b906-c14a-4084-926c-2d2c7ce201be-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"26e7b906-c14a-4084-926c-2d2c7ce201be\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 12:35:14 crc kubenswrapper[5002]: I0930 12:35:14.815383 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/26e7b906-c14a-4084-926c-2d2c7ce201be-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"26e7b906-c14a-4084-926c-2d2c7ce201be\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 12:35:14 crc kubenswrapper[5002]: I0930 12:35:14.815442 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/26e7b906-c14a-4084-926c-2d2c7ce201be-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"26e7b906-c14a-4084-926c-2d2c7ce201be\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 12:35:14 crc kubenswrapper[5002]: I0930 12:35:14.815485 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/26e7b906-c14a-4084-926c-2d2c7ce201be-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"26e7b906-c14a-4084-926c-2d2c7ce201be\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 12:35:14 crc kubenswrapper[5002]: I0930 12:35:14.815502 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/26e7b906-c14a-4084-926c-2d2c7ce201be-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"26e7b906-c14a-4084-926c-2d2c7ce201be\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 12:35:14 crc kubenswrapper[5002]: I0930 12:35:14.815723 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/26e7b906-c14a-4084-926c-2d2c7ce201be-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"26e7b906-c14a-4084-926c-2d2c7ce201be\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 12:35:14 crc kubenswrapper[5002]: I0930 12:35:14.815754 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/26e7b906-c14a-4084-926c-2d2c7ce201be-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"26e7b906-c14a-4084-926c-2d2c7ce201be\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 12:35:14 crc kubenswrapper[5002]: I0930 12:35:14.815782 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/26e7b906-c14a-4084-926c-2d2c7ce201be-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"26e7b906-c14a-4084-926c-2d2c7ce201be\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 12:35:14 crc kubenswrapper[5002]: I0930 12:35:14.815808 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/26e7b906-c14a-4084-926c-2d2c7ce201be-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"26e7b906-c14a-4084-926c-2d2c7ce201be\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 12:35:14 crc 
kubenswrapper[5002]: I0930 12:35:14.815823 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cfl92\" (UniqueName: \"kubernetes.io/projected/26e7b906-c14a-4084-926c-2d2c7ce201be-kube-api-access-cfl92\") pod \"rabbitmq-cell1-server-0\" (UID: \"26e7b906-c14a-4084-926c-2d2c7ce201be\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 12:35:14 crc kubenswrapper[5002]: I0930 12:35:14.815852 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/26e7b906-c14a-4084-926c-2d2c7ce201be-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"26e7b906-c14a-4084-926c-2d2c7ce201be\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 12:35:14 crc kubenswrapper[5002]: I0930 12:35:14.815881 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"26e7b906-c14a-4084-926c-2d2c7ce201be\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 12:35:14 crc kubenswrapper[5002]: I0930 12:35:14.816195 5002 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"26e7b906-c14a-4084-926c-2d2c7ce201be\") device mount path \"/mnt/openstack/pv02\"" pod="openstack/rabbitmq-cell1-server-0" Sep 30 12:35:14 crc kubenswrapper[5002]: I0930 12:35:14.818984 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/26e7b906-c14a-4084-926c-2d2c7ce201be-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"26e7b906-c14a-4084-926c-2d2c7ce201be\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 12:35:14 crc kubenswrapper[5002]: I0930 12:35:14.820095 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/26e7b906-c14a-4084-926c-2d2c7ce201be-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"26e7b906-c14a-4084-926c-2d2c7ce201be\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 12:35:14 crc kubenswrapper[5002]: I0930 12:35:14.820345 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/26e7b906-c14a-4084-926c-2d2c7ce201be-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"26e7b906-c14a-4084-926c-2d2c7ce201be\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 12:35:14 crc kubenswrapper[5002]: I0930 12:35:14.820857 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/26e7b906-c14a-4084-926c-2d2c7ce201be-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"26e7b906-c14a-4084-926c-2d2c7ce201be\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 12:35:14 crc kubenswrapper[5002]: I0930 12:35:14.822200 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/26e7b906-c14a-4084-926c-2d2c7ce201be-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"26e7b906-c14a-4084-926c-2d2c7ce201be\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 12:35:14 crc kubenswrapper[5002]: I0930 12:35:14.834527 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: 
\"kubernetes.io/projected/26e7b906-c14a-4084-926c-2d2c7ce201be-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"26e7b906-c14a-4084-926c-2d2c7ce201be\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 12:35:14 crc kubenswrapper[5002]: I0930 12:35:14.844140 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/26e7b906-c14a-4084-926c-2d2c7ce201be-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"26e7b906-c14a-4084-926c-2d2c7ce201be\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 12:35:14 crc kubenswrapper[5002]: I0930 12:35:14.854437 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/26e7b906-c14a-4084-926c-2d2c7ce201be-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"26e7b906-c14a-4084-926c-2d2c7ce201be\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 12:35:14 crc kubenswrapper[5002]: I0930 12:35:14.858706 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/26e7b906-c14a-4084-926c-2d2c7ce201be-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"26e7b906-c14a-4084-926c-2d2c7ce201be\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 12:35:14 crc kubenswrapper[5002]: I0930 12:35:14.862260 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cfl92\" (UniqueName: \"kubernetes.io/projected/26e7b906-c14a-4084-926c-2d2c7ce201be-kube-api-access-cfl92\") pod \"rabbitmq-cell1-server-0\" (UID: \"26e7b906-c14a-4084-926c-2d2c7ce201be\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 12:35:14 crc kubenswrapper[5002]: I0930 12:35:14.865696 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"26e7b906-c14a-4084-926c-2d2c7ce201be\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 12:35:14 crc kubenswrapper[5002]: I0930 12:35:14.890413 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-0"] Sep 30 12:35:14 crc kubenswrapper[5002]: I0930 12:35:14.898604 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Sep 30 12:35:14 crc kubenswrapper[5002]: I0930 12:35:14.900786 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-sjczl" Sep 30 12:35:14 crc kubenswrapper[5002]: I0930 12:35:14.900985 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie" Sep 30 12:35:14 crc kubenswrapper[5002]: I0930 12:35:14.901317 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-config-data" Sep 30 12:35:14 crc kubenswrapper[5002]: I0930 12:35:14.901570 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf" Sep 30 12:35:14 crc kubenswrapper[5002]: I0930 12:35:14.901645 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-svc" Sep 30 12:35:14 crc kubenswrapper[5002]: I0930 12:35:14.902882 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user" Sep 30 12:35:14 crc kubenswrapper[5002]: I0930 12:35:14.903011 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf" Sep 30 12:35:14 crc kubenswrapper[5002]: I0930 12:35:14.921718 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Sep 30 12:35:14 crc kubenswrapper[5002]: I0930 12:35:14.950286 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Sep 30 12:35:15 crc kubenswrapper[5002]: I0930 12:35:15.021214 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/f80ae063-860a-4997-9c9f-57bc3a850e37-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"f80ae063-860a-4997-9c9f-57bc3a850e37\") " pod="openstack/rabbitmq-server-0" Sep 30 12:35:15 crc kubenswrapper[5002]: I0930 12:35:15.021255 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/f80ae063-860a-4997-9c9f-57bc3a850e37-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"f80ae063-860a-4997-9c9f-57bc3a850e37\") " pod="openstack/rabbitmq-server-0" Sep 30 12:35:15 crc kubenswrapper[5002]: I0930 12:35:15.021392 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/f80ae063-860a-4997-9c9f-57bc3a850e37-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"f80ae063-860a-4997-9c9f-57bc3a850e37\") " pod="openstack/rabbitmq-server-0" Sep 30 12:35:15 crc kubenswrapper[5002]: I0930 12:35:15.021423 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/f80ae063-860a-4997-9c9f-57bc3a850e37-pod-info\") pod \"rabbitmq-server-0\" (UID: \"f80ae063-860a-4997-9c9f-57bc3a850e37\") " pod="openstack/rabbitmq-server-0" Sep 30 12:35:15 crc kubenswrapper[5002]: I0930 12:35:15.021518 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"rabbitmq-server-0\" (UID: \"f80ae063-860a-4997-9c9f-57bc3a850e37\") " pod="openstack/rabbitmq-server-0" Sep 30 12:35:15 crc kubenswrapper[5002]: I0930 12:35:15.021567 5002 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/f80ae063-860a-4997-9c9f-57bc3a850e37-server-conf\") pod \"rabbitmq-server-0\" (UID: \"f80ae063-860a-4997-9c9f-57bc3a850e37\") " pod="openstack/rabbitmq-server-0" Sep 30 12:35:15 crc kubenswrapper[5002]: I0930 12:35:15.021809 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/f80ae063-860a-4997-9c9f-57bc3a850e37-config-data\") pod \"rabbitmq-server-0\" (UID: \"f80ae063-860a-4997-9c9f-57bc3a850e37\") " pod="openstack/rabbitmq-server-0" Sep 30 12:35:15 crc kubenswrapper[5002]: I0930 12:35:15.021849 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/f80ae063-860a-4997-9c9f-57bc3a850e37-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"f80ae063-860a-4997-9c9f-57bc3a850e37\") " pod="openstack/rabbitmq-server-0" Sep 30 12:35:15 crc kubenswrapper[5002]: I0930 12:35:15.021879 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/f80ae063-860a-4997-9c9f-57bc3a850e37-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"f80ae063-860a-4997-9c9f-57bc3a850e37\") " pod="openstack/rabbitmq-server-0" Sep 30 12:35:15 crc kubenswrapper[5002]: I0930 12:35:15.021931 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v5gmm\" (UniqueName: \"kubernetes.io/projected/f80ae063-860a-4997-9c9f-57bc3a850e37-kube-api-access-v5gmm\") pod \"rabbitmq-server-0\" (UID: \"f80ae063-860a-4997-9c9f-57bc3a850e37\") " pod="openstack/rabbitmq-server-0" Sep 30 12:35:15 crc kubenswrapper[5002]: I0930 12:35:15.021988 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/f80ae063-860a-4997-9c9f-57bc3a850e37-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"f80ae063-860a-4997-9c9f-57bc3a850e37\") " pod="openstack/rabbitmq-server-0" Sep 30 12:35:15 crc kubenswrapper[5002]: I0930 12:35:15.123273 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/f80ae063-860a-4997-9c9f-57bc3a850e37-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"f80ae063-860a-4997-9c9f-57bc3a850e37\") " pod="openstack/rabbitmq-server-0" Sep 30 12:35:15 crc kubenswrapper[5002]: I0930 12:35:15.123619 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/f80ae063-860a-4997-9c9f-57bc3a850e37-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"f80ae063-860a-4997-9c9f-57bc3a850e37\") " pod="openstack/rabbitmq-server-0" Sep 30 12:35:15 crc kubenswrapper[5002]: I0930 12:35:15.123640 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/f80ae063-860a-4997-9c9f-57bc3a850e37-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"f80ae063-860a-4997-9c9f-57bc3a850e37\") " pod="openstack/rabbitmq-server-0" Sep 30 12:35:15 crc kubenswrapper[5002]: I0930 12:35:15.124369 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: 
\"kubernetes.io/configmap/f80ae063-860a-4997-9c9f-57bc3a850e37-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"f80ae063-860a-4997-9c9f-57bc3a850e37\") " pod="openstack/rabbitmq-server-0" Sep 30 12:35:15 crc kubenswrapper[5002]: I0930 12:35:15.124440 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/f80ae063-860a-4997-9c9f-57bc3a850e37-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"f80ae063-860a-4997-9c9f-57bc3a850e37\") " pod="openstack/rabbitmq-server-0" Sep 30 12:35:15 crc kubenswrapper[5002]: I0930 12:35:15.124771 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/f80ae063-860a-4997-9c9f-57bc3a850e37-pod-info\") pod \"rabbitmq-server-0\" (UID: \"f80ae063-860a-4997-9c9f-57bc3a850e37\") " pod="openstack/rabbitmq-server-0" Sep 30 12:35:15 crc kubenswrapper[5002]: I0930 12:35:15.124711 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/f80ae063-860a-4997-9c9f-57bc3a850e37-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"f80ae063-860a-4997-9c9f-57bc3a850e37\") " pod="openstack/rabbitmq-server-0" Sep 30 12:35:15 crc kubenswrapper[5002]: I0930 12:35:15.124840 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"rabbitmq-server-0\" (UID: \"f80ae063-860a-4997-9c9f-57bc3a850e37\") " pod="openstack/rabbitmq-server-0" Sep 30 12:35:15 crc kubenswrapper[5002]: I0930 12:35:15.125062 5002 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"rabbitmq-server-0\" (UID: \"f80ae063-860a-4997-9c9f-57bc3a850e37\") device mount path \"/mnt/openstack/pv01\"" pod="openstack/rabbitmq-server-0" Sep 30 12:35:15 crc kubenswrapper[5002]: I0930 12:35:15.124873 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/f80ae063-860a-4997-9c9f-57bc3a850e37-server-conf\") pod \"rabbitmq-server-0\" (UID: \"f80ae063-860a-4997-9c9f-57bc3a850e37\") " pod="openstack/rabbitmq-server-0" Sep 30 12:35:15 crc kubenswrapper[5002]: I0930 12:35:15.125306 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/f80ae063-860a-4997-9c9f-57bc3a850e37-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"f80ae063-860a-4997-9c9f-57bc3a850e37\") " pod="openstack/rabbitmq-server-0" Sep 30 12:35:15 crc kubenswrapper[5002]: I0930 12:35:15.125326 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/f80ae063-860a-4997-9c9f-57bc3a850e37-config-data\") pod \"rabbitmq-server-0\" (UID: \"f80ae063-860a-4997-9c9f-57bc3a850e37\") " pod="openstack/rabbitmq-server-0" Sep 30 12:35:15 crc kubenswrapper[5002]: I0930 12:35:15.125621 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/f80ae063-860a-4997-9c9f-57bc3a850e37-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"f80ae063-860a-4997-9c9f-57bc3a850e37\") " pod="openstack/rabbitmq-server-0" Sep 30 12:35:15 crc kubenswrapper[5002]: I0930 12:35:15.125703 5002 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/f80ae063-860a-4997-9c9f-57bc3a850e37-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"f80ae063-860a-4997-9c9f-57bc3a850e37\") " pod="openstack/rabbitmq-server-0" Sep 30 12:35:15 crc kubenswrapper[5002]: I0930 12:35:15.125759 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v5gmm\" (UniqueName: \"kubernetes.io/projected/f80ae063-860a-4997-9c9f-57bc3a850e37-kube-api-access-v5gmm\") pod \"rabbitmq-server-0\" (UID: \"f80ae063-860a-4997-9c9f-57bc3a850e37\") " pod="openstack/rabbitmq-server-0" Sep 30 12:35:15 crc kubenswrapper[5002]: I0930 12:35:15.126112 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/f80ae063-860a-4997-9c9f-57bc3a850e37-server-conf\") pod \"rabbitmq-server-0\" (UID: \"f80ae063-860a-4997-9c9f-57bc3a850e37\") " pod="openstack/rabbitmq-server-0" Sep 30 12:35:15 crc kubenswrapper[5002]: I0930 12:35:15.126904 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/f80ae063-860a-4997-9c9f-57bc3a850e37-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"f80ae063-860a-4997-9c9f-57bc3a850e37\") " pod="openstack/rabbitmq-server-0" Sep 30 12:35:15 crc kubenswrapper[5002]: I0930 12:35:15.127101 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/f80ae063-860a-4997-9c9f-57bc3a850e37-config-data\") pod \"rabbitmq-server-0\" (UID: \"f80ae063-860a-4997-9c9f-57bc3a850e37\") " pod="openstack/rabbitmq-server-0" Sep 30 12:35:15 crc kubenswrapper[5002]: I0930 12:35:15.128258 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/f80ae063-860a-4997-9c9f-57bc3a850e37-pod-info\") pod \"rabbitmq-server-0\" (UID: \"f80ae063-860a-4997-9c9f-57bc3a850e37\") " pod="openstack/rabbitmq-server-0" Sep 30 12:35:15 crc kubenswrapper[5002]: I0930 12:35:15.143521 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/f80ae063-860a-4997-9c9f-57bc3a850e37-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"f80ae063-860a-4997-9c9f-57bc3a850e37\") " pod="openstack/rabbitmq-server-0" Sep 30 12:35:15 crc kubenswrapper[5002]: I0930 12:35:15.144130 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/f80ae063-860a-4997-9c9f-57bc3a850e37-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"f80ae063-860a-4997-9c9f-57bc3a850e37\") " pod="openstack/rabbitmq-server-0" Sep 30 12:35:15 crc kubenswrapper[5002]: I0930 12:35:15.144383 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v5gmm\" (UniqueName: \"kubernetes.io/projected/f80ae063-860a-4997-9c9f-57bc3a850e37-kube-api-access-v5gmm\") pod \"rabbitmq-server-0\" (UID: \"f80ae063-860a-4997-9c9f-57bc3a850e37\") " pod="openstack/rabbitmq-server-0" Sep 30 12:35:15 crc kubenswrapper[5002]: I0930 12:35:15.146969 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"rabbitmq-server-0\" (UID: \"f80ae063-860a-4997-9c9f-57bc3a850e37\") " pod="openstack/rabbitmq-server-0" Sep 30 12:35:15 crc 
kubenswrapper[5002]: I0930 12:35:15.262014 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Sep 30 12:35:16 crc kubenswrapper[5002]: W0930 12:35:16.802630 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf69d7010_1d73_410f_a5f1_5aed01494f42.slice/crio-beedc2c2bb93ac57e391c26246b9c667ce4dc4c25fa05455227ab0ff2dabeaca WatchSource:0}: Error finding container beedc2c2bb93ac57e391c26246b9c667ce4dc4c25fa05455227ab0ff2dabeaca: Status 404 returned error can't find the container with id beedc2c2bb93ac57e391c26246b9c667ce4dc4c25fa05455227ab0ff2dabeaca Sep 30 12:35:17 crc kubenswrapper[5002]: I0930 12:35:17.470107 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5ccc8479f9-dkhh2" event={"ID":"dde3e2f9-15fd-4f59-8570-02fa37c770e0","Type":"ContainerStarted","Data":"105800949bc2720c1bf784ca1b9056066c1774e8a88a336dbcb2c9adbe0b9f8d"} Sep 30 12:35:17 crc kubenswrapper[5002]: I0930 12:35:17.471238 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-mp9f9" event={"ID":"f69d7010-1d73-410f-a5f1-5aed01494f42","Type":"ContainerStarted","Data":"beedc2c2bb93ac57e391c26246b9c667ce4dc4c25fa05455227ab0ff2dabeaca"} Sep 30 12:35:17 crc kubenswrapper[5002]: I0930 12:35:17.603760 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-galera-0"] Sep 30 12:35:17 crc kubenswrapper[5002]: I0930 12:35:17.605589 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-galera-0" Sep 30 12:35:17 crc kubenswrapper[5002]: I0930 12:35:17.607632 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-dockercfg-w8fgp" Sep 30 12:35:17 crc kubenswrapper[5002]: I0930 12:35:17.608022 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret" Sep 30 12:35:17 crc kubenswrapper[5002]: I0930 12:35:17.608919 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-svc" Sep 30 12:35:17 crc kubenswrapper[5002]: I0930 12:35:17.609326 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config-data" Sep 30 12:35:17 crc kubenswrapper[5002]: I0930 12:35:17.609690 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-scripts" Sep 30 12:35:17 crc kubenswrapper[5002]: I0930 12:35:17.624116 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"] Sep 30 12:35:17 crc kubenswrapper[5002]: I0930 12:35:17.624880 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"combined-ca-bundle" Sep 30 12:35:17 crc kubenswrapper[5002]: I0930 12:35:17.631255 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-cell1-galera-0"] Sep 30 12:35:17 crc kubenswrapper[5002]: I0930 12:35:17.632984 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstack-cell1-galera-0" Sep 30 12:35:17 crc kubenswrapper[5002]: I0930 12:35:17.635407 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-cell1-dockercfg-qs9nv" Sep 30 12:35:17 crc kubenswrapper[5002]: I0930 12:35:17.635756 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-cell1-svc" Sep 30 12:35:17 crc kubenswrapper[5002]: I0930 12:35:17.635872 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-config-data" Sep 30 12:35:17 crc kubenswrapper[5002]: I0930 12:35:17.635994 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-scripts" Sep 30 12:35:17 crc kubenswrapper[5002]: I0930 12:35:17.643091 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"] Sep 30 12:35:17 crc kubenswrapper[5002]: I0930 12:35:17.669602 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-smm6s\" (UniqueName: \"kubernetes.io/projected/9d5d9337-f90f-4576-94dc-805d4e653801-kube-api-access-smm6s\") pod \"openstack-galera-0\" (UID: \"9d5d9337-f90f-4576-94dc-805d4e653801\") " pod="openstack/openstack-galera-0" Sep 30 12:35:17 crc kubenswrapper[5002]: I0930 12:35:17.669648 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"openstack-galera-0\" (UID: \"9d5d9337-f90f-4576-94dc-805d4e653801\") " pod="openstack/openstack-galera-0" Sep 30 12:35:17 crc kubenswrapper[5002]: I0930 12:35:17.669679 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/9d5d9337-f90f-4576-94dc-805d4e653801-config-data-default\") pod \"openstack-galera-0\" (UID: \"9d5d9337-f90f-4576-94dc-805d4e653801\") " pod="openstack/openstack-galera-0" Sep 30 12:35:17 crc kubenswrapper[5002]: I0930 12:35:17.669701 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9d5d9337-f90f-4576-94dc-805d4e653801-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"9d5d9337-f90f-4576-94dc-805d4e653801\") " pod="openstack/openstack-galera-0" Sep 30 12:35:17 crc kubenswrapper[5002]: I0930 12:35:17.669717 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/9d5d9337-f90f-4576-94dc-805d4e653801-secrets\") pod \"openstack-galera-0\" (UID: \"9d5d9337-f90f-4576-94dc-805d4e653801\") " pod="openstack/openstack-galera-0" Sep 30 12:35:17 crc kubenswrapper[5002]: I0930 12:35:17.669732 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9d5d9337-f90f-4576-94dc-805d4e653801-operator-scripts\") pod \"openstack-galera-0\" (UID: \"9d5d9337-f90f-4576-94dc-805d4e653801\") " pod="openstack/openstack-galera-0" Sep 30 12:35:17 crc kubenswrapper[5002]: I0930 12:35:17.669755 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/9d5d9337-f90f-4576-94dc-805d4e653801-kolla-config\") pod \"openstack-galera-0\" 
(UID: \"9d5d9337-f90f-4576-94dc-805d4e653801\") " pod="openstack/openstack-galera-0" Sep 30 12:35:17 crc kubenswrapper[5002]: I0930 12:35:17.669776 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/9d5d9337-f90f-4576-94dc-805d4e653801-config-data-generated\") pod \"openstack-galera-0\" (UID: \"9d5d9337-f90f-4576-94dc-805d4e653801\") " pod="openstack/openstack-galera-0" Sep 30 12:35:17 crc kubenswrapper[5002]: I0930 12:35:17.669790 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/9d5d9337-f90f-4576-94dc-805d4e653801-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"9d5d9337-f90f-4576-94dc-805d4e653801\") " pod="openstack/openstack-galera-0" Sep 30 12:35:17 crc kubenswrapper[5002]: I0930 12:35:17.771724 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/9d5d9337-f90f-4576-94dc-805d4e653801-kolla-config\") pod \"openstack-galera-0\" (UID: \"9d5d9337-f90f-4576-94dc-805d4e653801\") " pod="openstack/openstack-galera-0" Sep 30 12:35:17 crc kubenswrapper[5002]: I0930 12:35:17.772064 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/9d5d9337-f90f-4576-94dc-805d4e653801-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"9d5d9337-f90f-4576-94dc-805d4e653801\") " pod="openstack/openstack-galera-0" Sep 30 12:35:17 crc kubenswrapper[5002]: I0930 12:35:17.772207 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/9d5d9337-f90f-4576-94dc-805d4e653801-config-data-generated\") pod \"openstack-galera-0\" (UID: \"9d5d9337-f90f-4576-94dc-805d4e653801\") " pod="openstack/openstack-galera-0" Sep 30 12:35:17 crc kubenswrapper[5002]: I0930 12:35:17.772349 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e109d20d-1925-4779-bbff-50bd39214d34-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"e109d20d-1925-4779-bbff-50bd39214d34\") " pod="openstack/openstack-cell1-galera-0" Sep 30 12:35:17 crc kubenswrapper[5002]: I0930 12:35:17.772727 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/e109d20d-1925-4779-bbff-50bd39214d34-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"e109d20d-1925-4779-bbff-50bd39214d34\") " pod="openstack/openstack-cell1-galera-0" Sep 30 12:35:17 crc kubenswrapper[5002]: I0930 12:35:17.772492 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/9d5d9337-f90f-4576-94dc-805d4e653801-kolla-config\") pod \"openstack-galera-0\" (UID: \"9d5d9337-f90f-4576-94dc-805d4e653801\") " pod="openstack/openstack-galera-0" Sep 30 12:35:17 crc kubenswrapper[5002]: I0930 12:35:17.773037 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/9d5d9337-f90f-4576-94dc-805d4e653801-config-data-generated\") pod \"openstack-galera-0\" (UID: \"9d5d9337-f90f-4576-94dc-805d4e653801\") " pod="openstack/openstack-galera-0" Sep 30 12:35:17 crc 
kubenswrapper[5002]: I0930 12:35:17.773989 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"openstack-cell1-galera-0\" (UID: \"e109d20d-1925-4779-bbff-50bd39214d34\") " pod="openstack/openstack-cell1-galera-0" Sep 30 12:35:17 crc kubenswrapper[5002]: I0930 12:35:17.774199 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e109d20d-1925-4779-bbff-50bd39214d34-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"e109d20d-1925-4779-bbff-50bd39214d34\") " pod="openstack/openstack-cell1-galera-0" Sep 30 12:35:17 crc kubenswrapper[5002]: I0930 12:35:17.774316 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-smm6s\" (UniqueName: \"kubernetes.io/projected/9d5d9337-f90f-4576-94dc-805d4e653801-kube-api-access-smm6s\") pod \"openstack-galera-0\" (UID: \"9d5d9337-f90f-4576-94dc-805d4e653801\") " pod="openstack/openstack-galera-0" Sep 30 12:35:17 crc kubenswrapper[5002]: I0930 12:35:17.774404 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/e109d20d-1925-4779-bbff-50bd39214d34-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"e109d20d-1925-4779-bbff-50bd39214d34\") " pod="openstack/openstack-cell1-galera-0" Sep 30 12:35:17 crc kubenswrapper[5002]: I0930 12:35:17.774552 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/e109d20d-1925-4779-bbff-50bd39214d34-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"e109d20d-1925-4779-bbff-50bd39214d34\") " pod="openstack/openstack-cell1-galera-0" Sep 30 12:35:17 crc kubenswrapper[5002]: I0930 12:35:17.774666 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"openstack-galera-0\" (UID: \"9d5d9337-f90f-4576-94dc-805d4e653801\") " pod="openstack/openstack-galera-0" Sep 30 12:35:17 crc kubenswrapper[5002]: I0930 12:35:17.774777 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/e109d20d-1925-4779-bbff-50bd39214d34-secrets\") pod \"openstack-cell1-galera-0\" (UID: \"e109d20d-1925-4779-bbff-50bd39214d34\") " pod="openstack/openstack-cell1-galera-0" Sep 30 12:35:17 crc kubenswrapper[5002]: I0930 12:35:17.774839 5002 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"openstack-galera-0\" (UID: \"9d5d9337-f90f-4576-94dc-805d4e653801\") device mount path \"/mnt/openstack/pv08\"" pod="openstack/openstack-galera-0" Sep 30 12:35:17 crc kubenswrapper[5002]: I0930 12:35:17.774963 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/9d5d9337-f90f-4576-94dc-805d4e653801-config-data-default\") pod \"openstack-galera-0\" (UID: \"9d5d9337-f90f-4576-94dc-805d4e653801\") " pod="openstack/openstack-galera-0" Sep 30 12:35:17 crc kubenswrapper[5002]: I0930 12:35:17.774993 5002 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sq4dz\" (UniqueName: \"kubernetes.io/projected/e109d20d-1925-4779-bbff-50bd39214d34-kube-api-access-sq4dz\") pod \"openstack-cell1-galera-0\" (UID: \"e109d20d-1925-4779-bbff-50bd39214d34\") " pod="openstack/openstack-cell1-galera-0" Sep 30 12:35:17 crc kubenswrapper[5002]: I0930 12:35:17.775014 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9d5d9337-f90f-4576-94dc-805d4e653801-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"9d5d9337-f90f-4576-94dc-805d4e653801\") " pod="openstack/openstack-galera-0" Sep 30 12:35:17 crc kubenswrapper[5002]: I0930 12:35:17.775030 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/9d5d9337-f90f-4576-94dc-805d4e653801-secrets\") pod \"openstack-galera-0\" (UID: \"9d5d9337-f90f-4576-94dc-805d4e653801\") " pod="openstack/openstack-galera-0" Sep 30 12:35:17 crc kubenswrapper[5002]: I0930 12:35:17.775048 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/e109d20d-1925-4779-bbff-50bd39214d34-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"e109d20d-1925-4779-bbff-50bd39214d34\") " pod="openstack/openstack-cell1-galera-0" Sep 30 12:35:17 crc kubenswrapper[5002]: I0930 12:35:17.775067 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9d5d9337-f90f-4576-94dc-805d4e653801-operator-scripts\") pod \"openstack-galera-0\" (UID: \"9d5d9337-f90f-4576-94dc-805d4e653801\") " pod="openstack/openstack-galera-0" Sep 30 12:35:17 crc kubenswrapper[5002]: I0930 12:35:17.776095 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9d5d9337-f90f-4576-94dc-805d4e653801-operator-scripts\") pod \"openstack-galera-0\" (UID: \"9d5d9337-f90f-4576-94dc-805d4e653801\") " pod="openstack/openstack-galera-0" Sep 30 12:35:17 crc kubenswrapper[5002]: I0930 12:35:17.776420 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/9d5d9337-f90f-4576-94dc-805d4e653801-config-data-default\") pod \"openstack-galera-0\" (UID: \"9d5d9337-f90f-4576-94dc-805d4e653801\") " pod="openstack/openstack-galera-0" Sep 30 12:35:17 crc kubenswrapper[5002]: I0930 12:35:17.777026 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/9d5d9337-f90f-4576-94dc-805d4e653801-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"9d5d9337-f90f-4576-94dc-805d4e653801\") " pod="openstack/openstack-galera-0" Sep 30 12:35:17 crc kubenswrapper[5002]: I0930 12:35:17.778258 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/9d5d9337-f90f-4576-94dc-805d4e653801-secrets\") pod \"openstack-galera-0\" (UID: \"9d5d9337-f90f-4576-94dc-805d4e653801\") " pod="openstack/openstack-galera-0" Sep 30 12:35:17 crc kubenswrapper[5002]: I0930 12:35:17.789555 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9d5d9337-f90f-4576-94dc-805d4e653801-combined-ca-bundle\") pod \"openstack-galera-0\" 
(UID: \"9d5d9337-f90f-4576-94dc-805d4e653801\") " pod="openstack/openstack-galera-0" Sep 30 12:35:17 crc kubenswrapper[5002]: I0930 12:35:17.798045 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-smm6s\" (UniqueName: \"kubernetes.io/projected/9d5d9337-f90f-4576-94dc-805d4e653801-kube-api-access-smm6s\") pod \"openstack-galera-0\" (UID: \"9d5d9337-f90f-4576-94dc-805d4e653801\") " pod="openstack/openstack-galera-0" Sep 30 12:35:17 crc kubenswrapper[5002]: I0930 12:35:17.801365 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"openstack-galera-0\" (UID: \"9d5d9337-f90f-4576-94dc-805d4e653801\") " pod="openstack/openstack-galera-0" Sep 30 12:35:17 crc kubenswrapper[5002]: I0930 12:35:17.877075 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e109d20d-1925-4779-bbff-50bd39214d34-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"e109d20d-1925-4779-bbff-50bd39214d34\") " pod="openstack/openstack-cell1-galera-0" Sep 30 12:35:17 crc kubenswrapper[5002]: I0930 12:35:17.877124 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/e109d20d-1925-4779-bbff-50bd39214d34-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"e109d20d-1925-4779-bbff-50bd39214d34\") " pod="openstack/openstack-cell1-galera-0" Sep 30 12:35:17 crc kubenswrapper[5002]: I0930 12:35:17.877170 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"openstack-cell1-galera-0\" (UID: \"e109d20d-1925-4779-bbff-50bd39214d34\") " pod="openstack/openstack-cell1-galera-0" Sep 30 12:35:17 crc kubenswrapper[5002]: I0930 12:35:17.877193 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e109d20d-1925-4779-bbff-50bd39214d34-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"e109d20d-1925-4779-bbff-50bd39214d34\") " pod="openstack/openstack-cell1-galera-0" Sep 30 12:35:17 crc kubenswrapper[5002]: I0930 12:35:17.877220 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/e109d20d-1925-4779-bbff-50bd39214d34-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"e109d20d-1925-4779-bbff-50bd39214d34\") " pod="openstack/openstack-cell1-galera-0" Sep 30 12:35:17 crc kubenswrapper[5002]: I0930 12:35:17.877241 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/e109d20d-1925-4779-bbff-50bd39214d34-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"e109d20d-1925-4779-bbff-50bd39214d34\") " pod="openstack/openstack-cell1-galera-0" Sep 30 12:35:17 crc kubenswrapper[5002]: I0930 12:35:17.877270 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/e109d20d-1925-4779-bbff-50bd39214d34-secrets\") pod \"openstack-cell1-galera-0\" (UID: \"e109d20d-1925-4779-bbff-50bd39214d34\") " pod="openstack/openstack-cell1-galera-0" Sep 30 12:35:17 crc kubenswrapper[5002]: I0930 12:35:17.877293 5002 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-sq4dz\" (UniqueName: \"kubernetes.io/projected/e109d20d-1925-4779-bbff-50bd39214d34-kube-api-access-sq4dz\") pod \"openstack-cell1-galera-0\" (UID: \"e109d20d-1925-4779-bbff-50bd39214d34\") " pod="openstack/openstack-cell1-galera-0" Sep 30 12:35:17 crc kubenswrapper[5002]: I0930 12:35:17.877312 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/e109d20d-1925-4779-bbff-50bd39214d34-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"e109d20d-1925-4779-bbff-50bd39214d34\") " pod="openstack/openstack-cell1-galera-0" Sep 30 12:35:17 crc kubenswrapper[5002]: I0930 12:35:17.878251 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/e109d20d-1925-4779-bbff-50bd39214d34-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"e109d20d-1925-4779-bbff-50bd39214d34\") " pod="openstack/openstack-cell1-galera-0" Sep 30 12:35:17 crc kubenswrapper[5002]: I0930 12:35:17.879269 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e109d20d-1925-4779-bbff-50bd39214d34-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"e109d20d-1925-4779-bbff-50bd39214d34\") " pod="openstack/openstack-cell1-galera-0" Sep 30 12:35:17 crc kubenswrapper[5002]: I0930 12:35:17.879685 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/e109d20d-1925-4779-bbff-50bd39214d34-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"e109d20d-1925-4779-bbff-50bd39214d34\") " pod="openstack/openstack-cell1-galera-0" Sep 30 12:35:17 crc kubenswrapper[5002]: I0930 12:35:17.879797 5002 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"openstack-cell1-galera-0\" (UID: \"e109d20d-1925-4779-bbff-50bd39214d34\") device mount path \"/mnt/openstack/pv04\"" pod="openstack/openstack-cell1-galera-0" Sep 30 12:35:17 crc kubenswrapper[5002]: I0930 12:35:17.880765 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/e109d20d-1925-4779-bbff-50bd39214d34-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"e109d20d-1925-4779-bbff-50bd39214d34\") " pod="openstack/openstack-cell1-galera-0" Sep 30 12:35:17 crc kubenswrapper[5002]: I0930 12:35:17.883738 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e109d20d-1925-4779-bbff-50bd39214d34-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"e109d20d-1925-4779-bbff-50bd39214d34\") " pod="openstack/openstack-cell1-galera-0" Sep 30 12:35:17 crc kubenswrapper[5002]: I0930 12:35:17.884310 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/e109d20d-1925-4779-bbff-50bd39214d34-secrets\") pod \"openstack-cell1-galera-0\" (UID: \"e109d20d-1925-4779-bbff-50bd39214d34\") " pod="openstack/openstack-cell1-galera-0" Sep 30 12:35:17 crc kubenswrapper[5002]: I0930 12:35:17.884663 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/e109d20d-1925-4779-bbff-50bd39214d34-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"e109d20d-1925-4779-bbff-50bd39214d34\") " pod="openstack/openstack-cell1-galera-0" Sep 30 12:35:17 crc kubenswrapper[5002]: I0930 12:35:17.900915 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sq4dz\" (UniqueName: \"kubernetes.io/projected/e109d20d-1925-4779-bbff-50bd39214d34-kube-api-access-sq4dz\") pod \"openstack-cell1-galera-0\" (UID: \"e109d20d-1925-4779-bbff-50bd39214d34\") " pod="openstack/openstack-cell1-galera-0" Sep 30 12:35:17 crc kubenswrapper[5002]: I0930 12:35:17.901019 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"openstack-cell1-galera-0\" (UID: \"e109d20d-1925-4779-bbff-50bd39214d34\") " pod="openstack/openstack-cell1-galera-0" Sep 30 12:35:17 crc kubenswrapper[5002]: I0930 12:35:17.949749 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-galera-0" Sep 30 12:35:17 crc kubenswrapper[5002]: I0930 12:35:17.961826 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-cell1-galera-0" Sep 30 12:35:18 crc kubenswrapper[5002]: I0930 12:35:18.196713 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/memcached-0"] Sep 30 12:35:18 crc kubenswrapper[5002]: I0930 12:35:18.204645 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0" Sep 30 12:35:18 crc kubenswrapper[5002]: I0930 12:35:18.211932 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"memcached-config-data" Sep 30 12:35:18 crc kubenswrapper[5002]: I0930 12:35:18.211939 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-memcached-svc" Sep 30 12:35:18 crc kubenswrapper[5002]: I0930 12:35:18.212073 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"memcached-memcached-dockercfg-5jszb" Sep 30 12:35:18 crc kubenswrapper[5002]: I0930 12:35:18.233347 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"] Sep 30 12:35:18 crc kubenswrapper[5002]: I0930 12:35:18.285221 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/7864c645-ac32-48bb-a292-0ff4ec2a5955-kolla-config\") pod \"memcached-0\" (UID: \"7864c645-ac32-48bb-a292-0ff4ec2a5955\") " pod="openstack/memcached-0" Sep 30 12:35:18 crc kubenswrapper[5002]: I0930 12:35:18.285438 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/7864c645-ac32-48bb-a292-0ff4ec2a5955-config-data\") pod \"memcached-0\" (UID: \"7864c645-ac32-48bb-a292-0ff4ec2a5955\") " pod="openstack/memcached-0" Sep 30 12:35:18 crc kubenswrapper[5002]: I0930 12:35:18.285544 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7864c645-ac32-48bb-a292-0ff4ec2a5955-combined-ca-bundle\") pod \"memcached-0\" (UID: \"7864c645-ac32-48bb-a292-0ff4ec2a5955\") " pod="openstack/memcached-0" Sep 30 12:35:18 crc kubenswrapper[5002]: I0930 12:35:18.285649 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"kube-api-access-x4fvd\" (UniqueName: \"kubernetes.io/projected/7864c645-ac32-48bb-a292-0ff4ec2a5955-kube-api-access-x4fvd\") pod \"memcached-0\" (UID: \"7864c645-ac32-48bb-a292-0ff4ec2a5955\") " pod="openstack/memcached-0" Sep 30 12:35:18 crc kubenswrapper[5002]: I0930 12:35:18.285761 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/7864c645-ac32-48bb-a292-0ff4ec2a5955-memcached-tls-certs\") pod \"memcached-0\" (UID: \"7864c645-ac32-48bb-a292-0ff4ec2a5955\") " pod="openstack/memcached-0" Sep 30 12:35:18 crc kubenswrapper[5002]: I0930 12:35:18.391183 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/7864c645-ac32-48bb-a292-0ff4ec2a5955-memcached-tls-certs\") pod \"memcached-0\" (UID: \"7864c645-ac32-48bb-a292-0ff4ec2a5955\") " pod="openstack/memcached-0" Sep 30 12:35:18 crc kubenswrapper[5002]: I0930 12:35:18.391253 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/7864c645-ac32-48bb-a292-0ff4ec2a5955-kolla-config\") pod \"memcached-0\" (UID: \"7864c645-ac32-48bb-a292-0ff4ec2a5955\") " pod="openstack/memcached-0" Sep 30 12:35:18 crc kubenswrapper[5002]: I0930 12:35:18.391294 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/7864c645-ac32-48bb-a292-0ff4ec2a5955-config-data\") pod \"memcached-0\" (UID: \"7864c645-ac32-48bb-a292-0ff4ec2a5955\") " pod="openstack/memcached-0" Sep 30 12:35:18 crc kubenswrapper[5002]: I0930 12:35:18.391318 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7864c645-ac32-48bb-a292-0ff4ec2a5955-combined-ca-bundle\") pod \"memcached-0\" (UID: \"7864c645-ac32-48bb-a292-0ff4ec2a5955\") " pod="openstack/memcached-0" Sep 30 12:35:18 crc kubenswrapper[5002]: I0930 12:35:18.391332 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x4fvd\" (UniqueName: \"kubernetes.io/projected/7864c645-ac32-48bb-a292-0ff4ec2a5955-kube-api-access-x4fvd\") pod \"memcached-0\" (UID: \"7864c645-ac32-48bb-a292-0ff4ec2a5955\") " pod="openstack/memcached-0" Sep 30 12:35:18 crc kubenswrapper[5002]: I0930 12:35:18.392114 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/7864c645-ac32-48bb-a292-0ff4ec2a5955-kolla-config\") pod \"memcached-0\" (UID: \"7864c645-ac32-48bb-a292-0ff4ec2a5955\") " pod="openstack/memcached-0" Sep 30 12:35:18 crc kubenswrapper[5002]: I0930 12:35:18.392250 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/7864c645-ac32-48bb-a292-0ff4ec2a5955-config-data\") pod \"memcached-0\" (UID: \"7864c645-ac32-48bb-a292-0ff4ec2a5955\") " pod="openstack/memcached-0" Sep 30 12:35:18 crc kubenswrapper[5002]: I0930 12:35:18.399096 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/7864c645-ac32-48bb-a292-0ff4ec2a5955-memcached-tls-certs\") pod \"memcached-0\" (UID: \"7864c645-ac32-48bb-a292-0ff4ec2a5955\") " pod="openstack/memcached-0" Sep 30 12:35:18 crc kubenswrapper[5002]: I0930 12:35:18.410377 5002 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7864c645-ac32-48bb-a292-0ff4ec2a5955-combined-ca-bundle\") pod \"memcached-0\" (UID: \"7864c645-ac32-48bb-a292-0ff4ec2a5955\") " pod="openstack/memcached-0" Sep 30 12:35:18 crc kubenswrapper[5002]: I0930 12:35:18.440946 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x4fvd\" (UniqueName: \"kubernetes.io/projected/7864c645-ac32-48bb-a292-0ff4ec2a5955-kube-api-access-x4fvd\") pod \"memcached-0\" (UID: \"7864c645-ac32-48bb-a292-0ff4ec2a5955\") " pod="openstack/memcached-0" Sep 30 12:35:18 crc kubenswrapper[5002]: I0930 12:35:18.520903 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0" Sep 30 12:35:20 crc kubenswrapper[5002]: I0930 12:35:20.015230 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/kube-state-metrics-0"] Sep 30 12:35:20 crc kubenswrapper[5002]: I0930 12:35:20.016536 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Sep 30 12:35:20 crc kubenswrapper[5002]: I0930 12:35:20.021320 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"telemetry-ceilometer-dockercfg-g4tvz" Sep 30 12:35:20 crc kubenswrapper[5002]: I0930 12:35:20.025491 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Sep 30 12:35:20 crc kubenswrapper[5002]: I0930 12:35:20.115608 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z6l25\" (UniqueName: \"kubernetes.io/projected/0fe7ac7f-c7a0-42a5-8089-059c05c40d3e-kube-api-access-z6l25\") pod \"kube-state-metrics-0\" (UID: \"0fe7ac7f-c7a0-42a5-8089-059c05c40d3e\") " pod="openstack/kube-state-metrics-0" Sep 30 12:35:20 crc kubenswrapper[5002]: I0930 12:35:20.217931 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z6l25\" (UniqueName: \"kubernetes.io/projected/0fe7ac7f-c7a0-42a5-8089-059c05c40d3e-kube-api-access-z6l25\") pod \"kube-state-metrics-0\" (UID: \"0fe7ac7f-c7a0-42a5-8089-059c05c40d3e\") " pod="openstack/kube-state-metrics-0" Sep 30 12:35:20 crc kubenswrapper[5002]: I0930 12:35:20.235053 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z6l25\" (UniqueName: \"kubernetes.io/projected/0fe7ac7f-c7a0-42a5-8089-059c05c40d3e-kube-api-access-z6l25\") pod \"kube-state-metrics-0\" (UID: \"0fe7ac7f-c7a0-42a5-8089-059c05c40d3e\") " pod="openstack/kube-state-metrics-0" Sep 30 12:35:20 crc kubenswrapper[5002]: I0930 12:35:20.336253 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Sep 30 12:35:23 crc kubenswrapper[5002]: I0930 12:35:23.525095 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-qq5zn"] Sep 30 12:35:23 crc kubenswrapper[5002]: I0930 12:35:23.544367 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-qq5zn"
Sep 30 12:35:23 crc kubenswrapper[5002]: I0930 12:35:23.546755 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovncontroller-ovndbs"
Sep 30 12:35:23 crc kubenswrapper[5002]: I0930 12:35:23.547557 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncontroller-ovncontroller-dockercfg-z422r"
Sep 30 12:35:23 crc kubenswrapper[5002]: I0930 12:35:23.547840 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-scripts"
Sep 30 12:35:23 crc kubenswrapper[5002]: I0930 12:35:23.575579 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6b699340-4bbd-4df4-951b-9404b0545d24-combined-ca-bundle\") pod \"ovn-controller-qq5zn\" (UID: \"6b699340-4bbd-4df4-951b-9404b0545d24\") " pod="openstack/ovn-controller-qq5zn"
Sep 30 12:35:23 crc kubenswrapper[5002]: I0930 12:35:23.575618 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6kvq7\" (UniqueName: \"kubernetes.io/projected/6b699340-4bbd-4df4-951b-9404b0545d24-kube-api-access-6kvq7\") pod \"ovn-controller-qq5zn\" (UID: \"6b699340-4bbd-4df4-951b-9404b0545d24\") " pod="openstack/ovn-controller-qq5zn"
Sep 30 12:35:23 crc kubenswrapper[5002]: I0930 12:35:23.575649 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/6b699340-4bbd-4df4-951b-9404b0545d24-ovn-controller-tls-certs\") pod \"ovn-controller-qq5zn\" (UID: \"6b699340-4bbd-4df4-951b-9404b0545d24\") " pod="openstack/ovn-controller-qq5zn"
Sep 30 12:35:23 crc kubenswrapper[5002]: I0930 12:35:23.575720 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/6b699340-4bbd-4df4-951b-9404b0545d24-var-log-ovn\") pod \"ovn-controller-qq5zn\" (UID: \"6b699340-4bbd-4df4-951b-9404b0545d24\") " pod="openstack/ovn-controller-qq5zn"
Sep 30 12:35:23 crc kubenswrapper[5002]: I0930 12:35:23.575772 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/6b699340-4bbd-4df4-951b-9404b0545d24-var-run-ovn\") pod \"ovn-controller-qq5zn\" (UID: \"6b699340-4bbd-4df4-951b-9404b0545d24\") " pod="openstack/ovn-controller-qq5zn"
Sep 30 12:35:23 crc kubenswrapper[5002]: I0930 12:35:23.575788 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/6b699340-4bbd-4df4-951b-9404b0545d24-scripts\") pod \"ovn-controller-qq5zn\" (UID: \"6b699340-4bbd-4df4-951b-9404b0545d24\") " pod="openstack/ovn-controller-qq5zn"
Sep 30 12:35:23 crc kubenswrapper[5002]: I0930 12:35:23.575824 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/6b699340-4bbd-4df4-951b-9404b0545d24-var-run\") pod \"ovn-controller-qq5zn\" (UID: \"6b699340-4bbd-4df4-951b-9404b0545d24\") " pod="openstack/ovn-controller-qq5zn"
Sep 30 12:35:23 crc kubenswrapper[5002]: I0930 12:35:23.578559 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-ovs-5m55z"]
Sep 30 12:35:23 crc kubenswrapper[5002]: I0930 12:35:23.581899 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-ovs-5m55z"
Sep 30 12:35:23 crc kubenswrapper[5002]: I0930 12:35:23.602597 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-qq5zn"]
Sep 30 12:35:23 crc kubenswrapper[5002]: I0930 12:35:23.615732 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-5m55z"]
Sep 30 12:35:23 crc kubenswrapper[5002]: I0930 12:35:23.677274 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6b699340-4bbd-4df4-951b-9404b0545d24-combined-ca-bundle\") pod \"ovn-controller-qq5zn\" (UID: \"6b699340-4bbd-4df4-951b-9404b0545d24\") " pod="openstack/ovn-controller-qq5zn"
Sep 30 12:35:23 crc kubenswrapper[5002]: I0930 12:35:23.677371 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6kvq7\" (UniqueName: \"kubernetes.io/projected/6b699340-4bbd-4df4-951b-9404b0545d24-kube-api-access-6kvq7\") pod \"ovn-controller-qq5zn\" (UID: \"6b699340-4bbd-4df4-951b-9404b0545d24\") " pod="openstack/ovn-controller-qq5zn"
Sep 30 12:35:23 crc kubenswrapper[5002]: I0930 12:35:23.677392 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/6b699340-4bbd-4df4-951b-9404b0545d24-ovn-controller-tls-certs\") pod \"ovn-controller-qq5zn\" (UID: \"6b699340-4bbd-4df4-951b-9404b0545d24\") " pod="openstack/ovn-controller-qq5zn"
Sep 30 12:35:23 crc kubenswrapper[5002]: I0930 12:35:23.677436 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/9d2f8d72-e223-4f96-917e-7b47baba18d8-scripts\") pod \"ovn-controller-ovs-5m55z\" (UID: \"9d2f8d72-e223-4f96-917e-7b47baba18d8\") " pod="openstack/ovn-controller-ovs-5m55z"
Sep 30 12:35:23 crc kubenswrapper[5002]: I0930 12:35:23.677462 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/9d2f8d72-e223-4f96-917e-7b47baba18d8-var-log\") pod \"ovn-controller-ovs-5m55z\" (UID: \"9d2f8d72-e223-4f96-917e-7b47baba18d8\") " pod="openstack/ovn-controller-ovs-5m55z"
Sep 30 12:35:23 crc kubenswrapper[5002]: I0930 12:35:23.677513 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/6b699340-4bbd-4df4-951b-9404b0545d24-var-log-ovn\") pod \"ovn-controller-qq5zn\" (UID: \"6b699340-4bbd-4df4-951b-9404b0545d24\") " pod="openstack/ovn-controller-qq5zn"
Sep 30 12:35:23 crc kubenswrapper[5002]: I0930 12:35:23.677542 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rgtl9\" (UniqueName: \"kubernetes.io/projected/9d2f8d72-e223-4f96-917e-7b47baba18d8-kube-api-access-rgtl9\") pod \"ovn-controller-ovs-5m55z\" (UID: \"9d2f8d72-e223-4f96-917e-7b47baba18d8\") " pod="openstack/ovn-controller-ovs-5m55z"
Sep 30 12:35:23 crc kubenswrapper[5002]: I0930 12:35:23.677560 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/9d2f8d72-e223-4f96-917e-7b47baba18d8-var-run\") pod \"ovn-controller-ovs-5m55z\" (UID: \"9d2f8d72-e223-4f96-917e-7b47baba18d8\") " pod="openstack/ovn-controller-ovs-5m55z"
Sep 30 12:35:23 crc kubenswrapper[5002]: I0930 12:35:23.677581 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/6b699340-4bbd-4df4-951b-9404b0545d24-var-run-ovn\") pod \"ovn-controller-qq5zn\" (UID: \"6b699340-4bbd-4df4-951b-9404b0545d24\") " pod="openstack/ovn-controller-qq5zn"
Sep 30 12:35:23 crc kubenswrapper[5002]: I0930 12:35:23.677599 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/6b699340-4bbd-4df4-951b-9404b0545d24-scripts\") pod \"ovn-controller-qq5zn\" (UID: \"6b699340-4bbd-4df4-951b-9404b0545d24\") " pod="openstack/ovn-controller-qq5zn"
Sep 30 12:35:23 crc kubenswrapper[5002]: I0930 12:35:23.677625 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/9d2f8d72-e223-4f96-917e-7b47baba18d8-var-lib\") pod \"ovn-controller-ovs-5m55z\" (UID: \"9d2f8d72-e223-4f96-917e-7b47baba18d8\") " pod="openstack/ovn-controller-ovs-5m55z"
Sep 30 12:35:23 crc kubenswrapper[5002]: I0930 12:35:23.677641 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/6b699340-4bbd-4df4-951b-9404b0545d24-var-run\") pod \"ovn-controller-qq5zn\" (UID: \"6b699340-4bbd-4df4-951b-9404b0545d24\") " pod="openstack/ovn-controller-qq5zn"
Sep 30 12:35:23 crc kubenswrapper[5002]: I0930 12:35:23.677665 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/9d2f8d72-e223-4f96-917e-7b47baba18d8-etc-ovs\") pod \"ovn-controller-ovs-5m55z\" (UID: \"9d2f8d72-e223-4f96-917e-7b47baba18d8\") " pod="openstack/ovn-controller-ovs-5m55z"
Sep 30 12:35:23 crc kubenswrapper[5002]: I0930 12:35:23.678357 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/6b699340-4bbd-4df4-951b-9404b0545d24-var-log-ovn\") pod \"ovn-controller-qq5zn\" (UID: \"6b699340-4bbd-4df4-951b-9404b0545d24\") " pod="openstack/ovn-controller-qq5zn"
Sep 30 12:35:23 crc kubenswrapper[5002]: I0930 12:35:23.678494 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/6b699340-4bbd-4df4-951b-9404b0545d24-var-run-ovn\") pod \"ovn-controller-qq5zn\" (UID: \"6b699340-4bbd-4df4-951b-9404b0545d24\") " pod="openstack/ovn-controller-qq5zn"
Sep 30 12:35:23 crc kubenswrapper[5002]: I0930 12:35:23.680787 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/6b699340-4bbd-4df4-951b-9404b0545d24-var-run\") pod \"ovn-controller-qq5zn\" (UID: \"6b699340-4bbd-4df4-951b-9404b0545d24\") " pod="openstack/ovn-controller-qq5zn"
Sep 30 12:35:23 crc kubenswrapper[5002]: I0930 12:35:23.681083 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/6b699340-4bbd-4df4-951b-9404b0545d24-scripts\") pod \"ovn-controller-qq5zn\" (UID: \"6b699340-4bbd-4df4-951b-9404b0545d24\") " pod="openstack/ovn-controller-qq5zn"
Sep 30 12:35:23 crc kubenswrapper[5002]: I0930 12:35:23.684925 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/6b699340-4bbd-4df4-951b-9404b0545d24-ovn-controller-tls-certs\") pod \"ovn-controller-qq5zn\" (UID: \"6b699340-4bbd-4df4-951b-9404b0545d24\") " pod="openstack/ovn-controller-qq5zn"
Sep 30 12:35:23 crc kubenswrapper[5002]: I0930 12:35:23.685328 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6b699340-4bbd-4df4-951b-9404b0545d24-combined-ca-bundle\") pod \"ovn-controller-qq5zn\" (UID: \"6b699340-4bbd-4df4-951b-9404b0545d24\") " pod="openstack/ovn-controller-qq5zn"
Sep 30 12:35:23 crc kubenswrapper[5002]: I0930 12:35:23.703887 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6kvq7\" (UniqueName: \"kubernetes.io/projected/6b699340-4bbd-4df4-951b-9404b0545d24-kube-api-access-6kvq7\") pod \"ovn-controller-qq5zn\" (UID: \"6b699340-4bbd-4df4-951b-9404b0545d24\") " pod="openstack/ovn-controller-qq5zn"
Sep 30 12:35:23 crc kubenswrapper[5002]: I0930 12:35:23.782628 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rgtl9\" (UniqueName: \"kubernetes.io/projected/9d2f8d72-e223-4f96-917e-7b47baba18d8-kube-api-access-rgtl9\") pod \"ovn-controller-ovs-5m55z\" (UID: \"9d2f8d72-e223-4f96-917e-7b47baba18d8\") " pod="openstack/ovn-controller-ovs-5m55z"
Sep 30 12:35:23 crc kubenswrapper[5002]: I0930 12:35:23.782763 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/9d2f8d72-e223-4f96-917e-7b47baba18d8-var-run\") pod \"ovn-controller-ovs-5m55z\" (UID: \"9d2f8d72-e223-4f96-917e-7b47baba18d8\") " pod="openstack/ovn-controller-ovs-5m55z"
Sep 30 12:35:23 crc kubenswrapper[5002]: I0930 12:35:23.782905 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/9d2f8d72-e223-4f96-917e-7b47baba18d8-var-lib\") pod \"ovn-controller-ovs-5m55z\" (UID: \"9d2f8d72-e223-4f96-917e-7b47baba18d8\") " pod="openstack/ovn-controller-ovs-5m55z"
Sep 30 12:35:23 crc kubenswrapper[5002]: I0930 12:35:23.783025 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/9d2f8d72-e223-4f96-917e-7b47baba18d8-etc-ovs\") pod \"ovn-controller-ovs-5m55z\" (UID: \"9d2f8d72-e223-4f96-917e-7b47baba18d8\") " pod="openstack/ovn-controller-ovs-5m55z"
Sep 30 12:35:23 crc kubenswrapper[5002]: I0930 12:35:23.783188 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/9d2f8d72-e223-4f96-917e-7b47baba18d8-scripts\") pod \"ovn-controller-ovs-5m55z\" (UID: \"9d2f8d72-e223-4f96-917e-7b47baba18d8\") " pod="openstack/ovn-controller-ovs-5m55z"
Sep 30 12:35:23 crc kubenswrapper[5002]: I0930 12:35:23.783254 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/9d2f8d72-e223-4f96-917e-7b47baba18d8-var-log\") pod \"ovn-controller-ovs-5m55z\" (UID: \"9d2f8d72-e223-4f96-917e-7b47baba18d8\") " pod="openstack/ovn-controller-ovs-5m55z"
Sep 30 12:35:23 crc kubenswrapper[5002]: I0930 12:35:23.783620 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/9d2f8d72-e223-4f96-917e-7b47baba18d8-var-log\") pod \"ovn-controller-ovs-5m55z\" (UID: \"9d2f8d72-e223-4f96-917e-7b47baba18d8\") " pod="openstack/ovn-controller-ovs-5m55z"
Sep 30 12:35:23 crc kubenswrapper[5002]: I0930 12:35:23.784202 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/9d2f8d72-e223-4f96-917e-7b47baba18d8-var-run\") pod \"ovn-controller-ovs-5m55z\" (UID: \"9d2f8d72-e223-4f96-917e-7b47baba18d8\") " pod="openstack/ovn-controller-ovs-5m55z"
Sep 30 12:35:23 crc kubenswrapper[5002]: I0930 12:35:23.784382 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/9d2f8d72-e223-4f96-917e-7b47baba18d8-var-lib\") pod \"ovn-controller-ovs-5m55z\" (UID: \"9d2f8d72-e223-4f96-917e-7b47baba18d8\") " pod="openstack/ovn-controller-ovs-5m55z"
Sep 30 12:35:23 crc kubenswrapper[5002]: I0930 12:35:23.786603 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/9d2f8d72-e223-4f96-917e-7b47baba18d8-scripts\") pod \"ovn-controller-ovs-5m55z\" (UID: \"9d2f8d72-e223-4f96-917e-7b47baba18d8\") " pod="openstack/ovn-controller-ovs-5m55z"
Sep 30 12:35:23 crc kubenswrapper[5002]: I0930 12:35:23.786823 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/9d2f8d72-e223-4f96-917e-7b47baba18d8-etc-ovs\") pod \"ovn-controller-ovs-5m55z\" (UID: \"9d2f8d72-e223-4f96-917e-7b47baba18d8\") " pod="openstack/ovn-controller-ovs-5m55z"
Sep 30 12:35:23 crc kubenswrapper[5002]: I0930 12:35:23.812389 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rgtl9\" (UniqueName: \"kubernetes.io/projected/9d2f8d72-e223-4f96-917e-7b47baba18d8-kube-api-access-rgtl9\") pod \"ovn-controller-ovs-5m55z\" (UID: \"9d2f8d72-e223-4f96-917e-7b47baba18d8\") " pod="openstack/ovn-controller-ovs-5m55z"
Sep 30 12:35:23 crc kubenswrapper[5002]: I0930 12:35:23.869269 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-qq5zn"
Sep 30 12:35:23 crc kubenswrapper[5002]: I0930 12:35:23.909122 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-ovs-5m55z"
Sep 30 12:35:25 crc kubenswrapper[5002]: I0930 12:35:25.358378 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-nb-0"]
Sep 30 12:35:25 crc kubenswrapper[5002]: I0930 12:35:25.360019 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-0"
Sep 30 12:35:25 crc kubenswrapper[5002]: I0930 12:35:25.363258 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-scripts"
Sep 30 12:35:25 crc kubenswrapper[5002]: I0930 12:35:25.363664 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-config"
Sep 30 12:35:25 crc kubenswrapper[5002]: I0930 12:35:25.364028 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovndbcluster-nb-ovndbs"
Sep 30 12:35:25 crc kubenswrapper[5002]: I0930 12:35:25.364218 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovn-metrics"
Sep 30 12:35:25 crc kubenswrapper[5002]: I0930 12:35:25.365281 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-nb-dockercfg-wht8s"
Sep 30 12:35:25 crc kubenswrapper[5002]: I0930 12:35:25.369860 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"]
Sep 30 12:35:25 crc kubenswrapper[5002]: I0930 12:35:25.413843 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1e148ee1-66bc-4300-a27a-a8c4ce515d51-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"1e148ee1-66bc-4300-a27a-a8c4ce515d51\") " pod="openstack/ovsdbserver-nb-0"
Sep 30 12:35:25 crc kubenswrapper[5002]: I0930 12:35:25.413885 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"ovsdbserver-nb-0\" (UID: \"1e148ee1-66bc-4300-a27a-a8c4ce515d51\") " pod="openstack/ovsdbserver-nb-0"
Sep 30 12:35:25 crc kubenswrapper[5002]: I0930 12:35:25.413918 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bnmdd\" (UniqueName: \"kubernetes.io/projected/1e148ee1-66bc-4300-a27a-a8c4ce515d51-kube-api-access-bnmdd\") pod \"ovsdbserver-nb-0\" (UID: \"1e148ee1-66bc-4300-a27a-a8c4ce515d51\") " pod="openstack/ovsdbserver-nb-0"
Sep 30 12:35:25 crc kubenswrapper[5002]: I0930 12:35:25.413954 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/1e148ee1-66bc-4300-a27a-a8c4ce515d51-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"1e148ee1-66bc-4300-a27a-a8c4ce515d51\") " pod="openstack/ovsdbserver-nb-0"
Sep 30 12:35:25 crc kubenswrapper[5002]: I0930 12:35:25.413993 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1e148ee1-66bc-4300-a27a-a8c4ce515d51-config\") pod \"ovsdbserver-nb-0\" (UID: \"1e148ee1-66bc-4300-a27a-a8c4ce515d51\") " pod="openstack/ovsdbserver-nb-0"
Sep 30 12:35:25 crc kubenswrapper[5002]: I0930 12:35:25.414012 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/1e148ee1-66bc-4300-a27a-a8c4ce515d51-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"1e148ee1-66bc-4300-a27a-a8c4ce515d51\") " pod="openstack/ovsdbserver-nb-0"
Sep 30 12:35:25 crc kubenswrapper[5002]: I0930 12:35:25.414388 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/1e148ee1-66bc-4300-a27a-a8c4ce515d51-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"1e148ee1-66bc-4300-a27a-a8c4ce515d51\") " pod="openstack/ovsdbserver-nb-0"
Sep 30 12:35:25 crc kubenswrapper[5002]: I0930 12:35:25.414441 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/1e148ee1-66bc-4300-a27a-a8c4ce515d51-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"1e148ee1-66bc-4300-a27a-a8c4ce515d51\") " pod="openstack/ovsdbserver-nb-0"
Sep 30 12:35:25 crc kubenswrapper[5002]: I0930 12:35:25.516606 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bnmdd\" (UniqueName: \"kubernetes.io/projected/1e148ee1-66bc-4300-a27a-a8c4ce515d51-kube-api-access-bnmdd\") pod \"ovsdbserver-nb-0\" (UID: \"1e148ee1-66bc-4300-a27a-a8c4ce515d51\") " pod="openstack/ovsdbserver-nb-0"
Sep 30 12:35:25 crc kubenswrapper[5002]: I0930 12:35:25.516718 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/1e148ee1-66bc-4300-a27a-a8c4ce515d51-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"1e148ee1-66bc-4300-a27a-a8c4ce515d51\") " pod="openstack/ovsdbserver-nb-0"
Sep 30 12:35:25 crc kubenswrapper[5002]: I0930 12:35:25.516827 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1e148ee1-66bc-4300-a27a-a8c4ce515d51-config\") pod \"ovsdbserver-nb-0\" (UID: \"1e148ee1-66bc-4300-a27a-a8c4ce515d51\") " pod="openstack/ovsdbserver-nb-0"
Sep 30 12:35:25 crc kubenswrapper[5002]: I0930 12:35:25.516861 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/1e148ee1-66bc-4300-a27a-a8c4ce515d51-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"1e148ee1-66bc-4300-a27a-a8c4ce515d51\") " pod="openstack/ovsdbserver-nb-0"
Sep 30 12:35:25 crc kubenswrapper[5002]: I0930 12:35:25.516914 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/1e148ee1-66bc-4300-a27a-a8c4ce515d51-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"1e148ee1-66bc-4300-a27a-a8c4ce515d51\") " pod="openstack/ovsdbserver-nb-0"
Sep 30 12:35:25 crc kubenswrapper[5002]: I0930 12:35:25.516955 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/1e148ee1-66bc-4300-a27a-a8c4ce515d51-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"1e148ee1-66bc-4300-a27a-a8c4ce515d51\") " pod="openstack/ovsdbserver-nb-0"
Sep 30 12:35:25 crc kubenswrapper[5002]: I0930 12:35:25.517000 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1e148ee1-66bc-4300-a27a-a8c4ce515d51-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"1e148ee1-66bc-4300-a27a-a8c4ce515d51\") " pod="openstack/ovsdbserver-nb-0"
Sep 30 12:35:25 crc kubenswrapper[5002]: I0930 12:35:25.517038 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"ovsdbserver-nb-0\" (UID: \"1e148ee1-66bc-4300-a27a-a8c4ce515d51\") " pod="openstack/ovsdbserver-nb-0"
Sep 30 12:35:25 crc kubenswrapper[5002]: I0930 12:35:25.517617 5002 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"ovsdbserver-nb-0\" (UID: \"1e148ee1-66bc-4300-a27a-a8c4ce515d51\") device mount path \"/mnt/openstack/pv05\"" pod="openstack/ovsdbserver-nb-0"
Sep 30 12:35:25 crc kubenswrapper[5002]: I0930 12:35:25.518022 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1e148ee1-66bc-4300-a27a-a8c4ce515d51-config\") pod \"ovsdbserver-nb-0\" (UID: \"1e148ee1-66bc-4300-a27a-a8c4ce515d51\") " pod="openstack/ovsdbserver-nb-0"
Sep 30 12:35:25 crc kubenswrapper[5002]: I0930 12:35:25.518043 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/1e148ee1-66bc-4300-a27a-a8c4ce515d51-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"1e148ee1-66bc-4300-a27a-a8c4ce515d51\") " pod="openstack/ovsdbserver-nb-0"
Sep 30 12:35:25 crc kubenswrapper[5002]: I0930 12:35:25.518559 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/1e148ee1-66bc-4300-a27a-a8c4ce515d51-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"1e148ee1-66bc-4300-a27a-a8c4ce515d51\") " pod="openstack/ovsdbserver-nb-0"
Sep 30 12:35:25 crc kubenswrapper[5002]: I0930 12:35:25.523987 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/1e148ee1-66bc-4300-a27a-a8c4ce515d51-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"1e148ee1-66bc-4300-a27a-a8c4ce515d51\") " pod="openstack/ovsdbserver-nb-0"
Sep 30 12:35:25 crc kubenswrapper[5002]: I0930 12:35:25.525411 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1e148ee1-66bc-4300-a27a-a8c4ce515d51-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"1e148ee1-66bc-4300-a27a-a8c4ce515d51\") " pod="openstack/ovsdbserver-nb-0"
Sep 30 12:35:25 crc kubenswrapper[5002]: I0930 12:35:25.527717 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/1e148ee1-66bc-4300-a27a-a8c4ce515d51-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"1e148ee1-66bc-4300-a27a-a8c4ce515d51\") " pod="openstack/ovsdbserver-nb-0"
Sep 30 12:35:25 crc kubenswrapper[5002]: I0930 12:35:25.534153 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bnmdd\" (UniqueName: \"kubernetes.io/projected/1e148ee1-66bc-4300-a27a-a8c4ce515d51-kube-api-access-bnmdd\") pod \"ovsdbserver-nb-0\" (UID: \"1e148ee1-66bc-4300-a27a-a8c4ce515d51\") " pod="openstack/ovsdbserver-nb-0"
Sep 30 12:35:25 crc kubenswrapper[5002]: I0930 12:35:25.547406 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"ovsdbserver-nb-0\" (UID: \"1e148ee1-66bc-4300-a27a-a8c4ce515d51\") " pod="openstack/ovsdbserver-nb-0"
Sep 30 12:35:25 crc kubenswrapper[5002]: I0930 12:35:25.712948 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-0"
Sep 30 12:35:26 crc kubenswrapper[5002]: E0930 12:35:26.075575 5002 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified"
Sep 30 12:35:26 crc kubenswrapper[5002]: E0930 12:35:26.075976 5002 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries --test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:nffh5bdhf4h5f8h79h55h77h58fh56dh7bh6fh578hbch55dh68h56bhd9h65dh57ch658hc9h566h666h688h58h65dh684h5d7h6ch575h5d6h88q,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-ssb9m,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-675f4bcbfc-gpdm5_openstack(9e945411-e51f-48af-ac88-de0fff99c16e): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Sep 30 12:35:26 crc kubenswrapper[5002]: E0930 12:35:26.077209 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-675f4bcbfc-gpdm5" podUID="9e945411-e51f-48af-ac88-de0fff99c16e"
Sep 30 12:35:26 crc kubenswrapper[5002]: E0930 12:35:26.097871 5002 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified"
Sep 30 12:35:26 crc kubenswrapper[5002]: E0930 12:35:26.098045 5002 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries --test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:ndfhb5h667h568h584h5f9h58dh565h664h587h597h577h64bh5c4h66fh647hbdh68ch5c5h68dh686h5f7h64hd7hc6h55fh57bh98h57fh87h5fh57fq,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-n5xjq,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-78dd6ddcc-t7dh5_openstack(386f8b04-affa-45f6-b394-f4f83cda6cb7): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Sep 30 12:35:26 crc kubenswrapper[5002]: E0930 12:35:26.099260 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-78dd6ddcc-t7dh5" podUID="386f8b04-affa-45f6-b394-f4f83cda6cb7"
Sep 30 12:35:26 crc kubenswrapper[5002]: I0930 12:35:26.547917 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5ccc8479f9-dkhh2" event={"ID":"dde3e2f9-15fd-4f59-8570-02fa37c770e0","Type":"ContainerStarted","Data":"a8d151e1994f5fc5f79282f383a2f88900d2d9c7a4e1c149a05246a5d6fc4fda"}
Sep 30 12:35:26 crc kubenswrapper[5002]: I0930 12:35:26.550234 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-mp9f9" event={"ID":"f69d7010-1d73-410f-a5f1-5aed01494f42","Type":"ContainerStarted","Data":"98bf594c4f007832b8dda391a700a183219a5fe942ba5f33059cbfd1f1fb7476"}
Sep 30 12:35:26 crc kubenswrapper[5002]: I0930 12:35:26.711863 5002 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Sep 30 12:35:26 crc kubenswrapper[5002]: I0930 12:35:26.720347 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"]
Sep 30 12:35:26 crc kubenswrapper[5002]: I0930 12:35:26.725304 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"]
Sep 30 12:35:27 crc kubenswrapper[5002]: I0930 12:35:27.094205 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-t7dh5"
Sep 30 12:35:27 crc kubenswrapper[5002]: I0930 12:35:27.124552 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-sb-0"]
Sep 30 12:35:27 crc kubenswrapper[5002]: I0930 12:35:27.126018 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-0"
Sep 30 12:35:27 crc kubenswrapper[5002]: I0930 12:35:27.131092 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovndbcluster-sb-ovndbs"
Sep 30 12:35:27 crc kubenswrapper[5002]: I0930 12:35:27.131310 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-scripts"
Sep 30 12:35:27 crc kubenswrapper[5002]: I0930 12:35:27.133434 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-config"
Sep 30 12:35:27 crc kubenswrapper[5002]: I0930 12:35:27.133510 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-sb-dockercfg-c8dmn"
Sep 30 12:35:27 crc kubenswrapper[5002]: I0930 12:35:27.139131 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"]
Sep 30 12:35:27 crc kubenswrapper[5002]: I0930 12:35:27.145938 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/386f8b04-affa-45f6-b394-f4f83cda6cb7-config\") pod \"386f8b04-affa-45f6-b394-f4f83cda6cb7\" (UID: \"386f8b04-affa-45f6-b394-f4f83cda6cb7\") "
Sep 30 12:35:27 crc kubenswrapper[5002]: I0930 12:35:27.146066 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n5xjq\" (UniqueName: \"kubernetes.io/projected/386f8b04-affa-45f6-b394-f4f83cda6cb7-kube-api-access-n5xjq\") pod \"386f8b04-affa-45f6-b394-f4f83cda6cb7\" (UID: \"386f8b04-affa-45f6-b394-f4f83cda6cb7\") "
Sep 30 12:35:27 crc kubenswrapper[5002]: I0930 12:35:27.146099 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/386f8b04-affa-45f6-b394-f4f83cda6cb7-dns-svc\") pod \"386f8b04-affa-45f6-b394-f4f83cda6cb7\" (UID: \"386f8b04-affa-45f6-b394-f4f83cda6cb7\") "
Sep 30 12:35:27 crc kubenswrapper[5002]: I0930 12:35:27.146607 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/386f8b04-affa-45f6-b394-f4f83cda6cb7-config" (OuterVolumeSpecName: "config") pod "386f8b04-affa-45f6-b394-f4f83cda6cb7" (UID: "386f8b04-affa-45f6-b394-f4f83cda6cb7"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 30 12:35:27 crc kubenswrapper[5002]: I0930 12:35:27.147265 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/386f8b04-affa-45f6-b394-f4f83cda6cb7-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "386f8b04-affa-45f6-b394-f4f83cda6cb7" (UID: "386f8b04-affa-45f6-b394-f4f83cda6cb7"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 30 12:35:27 crc kubenswrapper[5002]: I0930 12:35:27.147382 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/7304f60c-c750-409f-bca4-4fd12c239891-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"7304f60c-c750-409f-bca4-4fd12c239891\") " pod="openstack/ovsdbserver-sb-0"
Sep 30 12:35:27 crc kubenswrapper[5002]: I0930 12:35:27.147491 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7304f60c-c750-409f-bca4-4fd12c239891-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"7304f60c-c750-409f-bca4-4fd12c239891\") " pod="openstack/ovsdbserver-sb-0"
Sep 30 12:35:27 crc kubenswrapper[5002]: I0930 12:35:27.150153 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"]
Sep 30 12:35:27 crc kubenswrapper[5002]: I0930 12:35:27.152322 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jgzrl\" (UniqueName: \"kubernetes.io/projected/7304f60c-c750-409f-bca4-4fd12c239891-kube-api-access-jgzrl\") pod \"ovsdbserver-sb-0\" (UID: \"7304f60c-c750-409f-bca4-4fd12c239891\") " pod="openstack/ovsdbserver-sb-0"
Sep 30 12:35:27 crc kubenswrapper[5002]: I0930 12:35:27.154687 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/7304f60c-c750-409f-bca4-4fd12c239891-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"7304f60c-c750-409f-bca4-4fd12c239891\") " pod="openstack/ovsdbserver-sb-0"
Sep 30 12:35:27 crc kubenswrapper[5002]: I0930 12:35:27.154845 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7304f60c-c750-409f-bca4-4fd12c239891-config\") pod \"ovsdbserver-sb-0\" (UID: \"7304f60c-c750-409f-bca4-4fd12c239891\") " pod="openstack/ovsdbserver-sb-0"
Sep 30 12:35:27 crc kubenswrapper[5002]: I0930 12:35:27.154905 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/7304f60c-c750-409f-bca4-4fd12c239891-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"7304f60c-c750-409f-bca4-4fd12c239891\") " pod="openstack/ovsdbserver-sb-0"
Sep 30 12:35:27 crc kubenswrapper[5002]: I0930 12:35:27.154984 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/7304f60c-c750-409f-bca4-4fd12c239891-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"7304f60c-c750-409f-bca4-4fd12c239891\") " pod="openstack/ovsdbserver-sb-0"
Sep 30 12:35:27 crc kubenswrapper[5002]: I0930 12:35:27.155004 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"ovsdbserver-sb-0\" (UID: \"7304f60c-c750-409f-bca4-4fd12c239891\") " pod="openstack/ovsdbserver-sb-0"
Sep 30 12:35:27 crc kubenswrapper[5002]: I0930 12:35:27.156121 5002 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/386f8b04-affa-45f6-b394-f4f83cda6cb7-config\") on node \"crc\" DevicePath \"\""
Sep 30 12:35:27 crc kubenswrapper[5002]: I0930 12:35:27.156253 5002 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/386f8b04-affa-45f6-b394-f4f83cda6cb7-dns-svc\") on node \"crc\" DevicePath \"\""
Sep 30 12:35:27 crc kubenswrapper[5002]: I0930 12:35:27.156199 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/386f8b04-affa-45f6-b394-f4f83cda6cb7-kube-api-access-n5xjq" (OuterVolumeSpecName: "kube-api-access-n5xjq") pod "386f8b04-affa-45f6-b394-f4f83cda6cb7" (UID: "386f8b04-affa-45f6-b394-f4f83cda6cb7"). InnerVolumeSpecName "kube-api-access-n5xjq". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 12:35:27 crc kubenswrapper[5002]: I0930 12:35:27.170553 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-qq5zn"]
Sep 30 12:35:27 crc kubenswrapper[5002]: I0930 12:35:27.199174 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"]
Sep 30 12:35:27 crc kubenswrapper[5002]: I0930 12:35:27.203681 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"]
Sep 30 12:35:27 crc kubenswrapper[5002]: I0930 12:35:27.206616 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-gpdm5"
Sep 30 12:35:27 crc kubenswrapper[5002]: I0930 12:35:27.208315 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"]
Sep 30 12:35:27 crc kubenswrapper[5002]: W0930 12:35:27.208942 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf80ae063_860a_4997_9c9f_57bc3a850e37.slice/crio-3eaace2f9b0aed40df6a26c28fe5f12235e819b65e60e1d4aa62569ea4beb30a WatchSource:0}: Error finding container 3eaace2f9b0aed40df6a26c28fe5f12235e819b65e60e1d4aa62569ea4beb30a: Status 404 returned error can't find the container with id 3eaace2f9b0aed40df6a26c28fe5f12235e819b65e60e1d4aa62569ea4beb30a
Sep 30 12:35:27 crc kubenswrapper[5002]: I0930 12:35:27.257689 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ssb9m\" (UniqueName: \"kubernetes.io/projected/9e945411-e51f-48af-ac88-de0fff99c16e-kube-api-access-ssb9m\") pod \"9e945411-e51f-48af-ac88-de0fff99c16e\" (UID: \"9e945411-e51f-48af-ac88-de0fff99c16e\") "
Sep 30 12:35:27 crc kubenswrapper[5002]: I0930 12:35:27.257771 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9e945411-e51f-48af-ac88-de0fff99c16e-config\") pod \"9e945411-e51f-48af-ac88-de0fff99c16e\" (UID: \"9e945411-e51f-48af-ac88-de0fff99c16e\") "
Sep 30 12:35:27 crc kubenswrapper[5002]: I0930 12:35:27.258133 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7304f60c-c750-409f-bca4-4fd12c239891-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"7304f60c-c750-409f-bca4-4fd12c239891\") " pod="openstack/ovsdbserver-sb-0"
Sep 30 12:35:27 crc kubenswrapper[5002]: I0930 12:35:27.258171 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jgzrl\" (UniqueName: \"kubernetes.io/projected/7304f60c-c750-409f-bca4-4fd12c239891-kube-api-access-jgzrl\") pod \"ovsdbserver-sb-0\" (UID: \"7304f60c-c750-409f-bca4-4fd12c239891\") " pod="openstack/ovsdbserver-sb-0"
Sep 30 12:35:27 crc kubenswrapper[5002]: I0930 12:35:27.258214 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/7304f60c-c750-409f-bca4-4fd12c239891-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"7304f60c-c750-409f-bca4-4fd12c239891\") " pod="openstack/ovsdbserver-sb-0"
Sep 30 12:35:27 crc kubenswrapper[5002]: I0930 12:35:27.258256 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7304f60c-c750-409f-bca4-4fd12c239891-config\") pod \"ovsdbserver-sb-0\" (UID: \"7304f60c-c750-409f-bca4-4fd12c239891\") " pod="openstack/ovsdbserver-sb-0"
Sep 30 12:35:27 crc kubenswrapper[5002]: I0930 12:35:27.258312 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/7304f60c-c750-409f-bca4-4fd12c239891-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"7304f60c-c750-409f-bca4-4fd12c239891\") " pod="openstack/ovsdbserver-sb-0"
Sep 30 12:35:27 crc kubenswrapper[5002]: I0930 12:35:27.258370 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/7304f60c-c750-409f-bca4-4fd12c239891-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"7304f60c-c750-409f-bca4-4fd12c239891\") " pod="openstack/ovsdbserver-sb-0"
Sep 30 12:35:27 crc kubenswrapper[5002]: I0930 12:35:27.258397 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"ovsdbserver-sb-0\" (UID: \"7304f60c-c750-409f-bca4-4fd12c239891\") " pod="openstack/ovsdbserver-sb-0"
Sep 30 12:35:27 crc kubenswrapper[5002]: I0930 12:35:27.258505 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/7304f60c-c750-409f-bca4-4fd12c239891-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"7304f60c-c750-409f-bca4-4fd12c239891\") " pod="openstack/ovsdbserver-sb-0"
Sep 30 12:35:27 crc kubenswrapper[5002]: I0930 12:35:27.258602 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n5xjq\" (UniqueName: \"kubernetes.io/projected/386f8b04-affa-45f6-b394-f4f83cda6cb7-kube-api-access-n5xjq\") on node \"crc\" DevicePath \"\""
Sep 30 12:35:27 crc kubenswrapper[5002]: I0930 12:35:27.259671 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/7304f60c-c750-409f-bca4-4fd12c239891-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"7304f60c-c750-409f-bca4-4fd12c239891\") " pod="openstack/ovsdbserver-sb-0"
Sep 30 12:35:27 crc kubenswrapper[5002]: I0930 12:35:27.260586 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9e945411-e51f-48af-ac88-de0fff99c16e-config" (OuterVolumeSpecName: "config") pod "9e945411-e51f-48af-ac88-de0fff99c16e" (UID: "9e945411-e51f-48af-ac88-de0fff99c16e"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 30 12:35:27 crc kubenswrapper[5002]: I0930 12:35:27.261370 5002 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"ovsdbserver-sb-0\" (UID: \"7304f60c-c750-409f-bca4-4fd12c239891\") device mount path \"/mnt/openstack/pv12\"" pod="openstack/ovsdbserver-sb-0"
Sep 30 12:35:27 crc kubenswrapper[5002]: I0930 12:35:27.261843 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/7304f60c-c750-409f-bca4-4fd12c239891-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"7304f60c-c750-409f-bca4-4fd12c239891\") " pod="openstack/ovsdbserver-sb-0"
Sep 30 12:35:27 crc kubenswrapper[5002]: I0930 12:35:27.262145 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7304f60c-c750-409f-bca4-4fd12c239891-config\") pod \"ovsdbserver-sb-0\" (UID: \"7304f60c-c750-409f-bca4-4fd12c239891\") " pod="openstack/ovsdbserver-sb-0"
Sep 30 12:35:27 crc kubenswrapper[5002]: I0930 12:35:27.263273 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9e945411-e51f-48af-ac88-de0fff99c16e-kube-api-access-ssb9m" (OuterVolumeSpecName: "kube-api-access-ssb9m") pod "9e945411-e51f-48af-ac88-de0fff99c16e" (UID: "9e945411-e51f-48af-ac88-de0fff99c16e"). InnerVolumeSpecName "kube-api-access-ssb9m". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 12:35:27 crc kubenswrapper[5002]: I0930 12:35:27.264823 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/7304f60c-c750-409f-bca4-4fd12c239891-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"7304f60c-c750-409f-bca4-4fd12c239891\") " pod="openstack/ovsdbserver-sb-0"
Sep 30 12:35:27 crc kubenswrapper[5002]: I0930 12:35:27.266146 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7304f60c-c750-409f-bca4-4fd12c239891-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"7304f60c-c750-409f-bca4-4fd12c239891\") " pod="openstack/ovsdbserver-sb-0"
Sep 30 12:35:27 crc kubenswrapper[5002]: I0930 12:35:27.270097 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/7304f60c-c750-409f-bca4-4fd12c239891-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"7304f60c-c750-409f-bca4-4fd12c239891\") " pod="openstack/ovsdbserver-sb-0"
Sep 30 12:35:27 crc kubenswrapper[5002]: I0930 12:35:27.278989 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jgzrl\" (UniqueName: \"kubernetes.io/projected/7304f60c-c750-409f-bca4-4fd12c239891-kube-api-access-jgzrl\") pod \"ovsdbserver-sb-0\" (UID: \"7304f60c-c750-409f-bca4-4fd12c239891\") " pod="openstack/ovsdbserver-sb-0"
Sep 30 12:35:27 crc kubenswrapper[5002]: I0930 12:35:27.300737 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"ovsdbserver-sb-0\" (UID: \"7304f60c-c750-409f-bca4-4fd12c239891\") " pod="openstack/ovsdbserver-sb-0"
Sep 30 12:35:27 crc kubenswrapper[5002]: I0930 12:35:27.341055 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"]
Sep 30 12:35:27 crc kubenswrapper[5002]: I0930 12:35:27.360152 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ssb9m\" (UniqueName: \"kubernetes.io/projected/9e945411-e51f-48af-ac88-de0fff99c16e-kube-api-access-ssb9m\") on node \"crc\" DevicePath \"\""
Sep 30 12:35:27 crc kubenswrapper[5002]: I0930 12:35:27.360187 5002 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9e945411-e51f-48af-ac88-de0fff99c16e-config\") on node \"crc\" DevicePath \"\""
Sep 30 12:35:27 crc kubenswrapper[5002]: I0930 12:35:27.450244 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-0"
Sep 30 12:35:27 crc kubenswrapper[5002]: I0930 12:35:27.560002 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"e109d20d-1925-4779-bbff-50bd39214d34","Type":"ContainerStarted","Data":"e527e3febaa8af21f198ec7841fcfaa5f8c5585d9fb544da5d46231bd68b5321"}
Sep 30 12:35:27 crc kubenswrapper[5002]: I0930 12:35:27.562695 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"7864c645-ac32-48bb-a292-0ff4ec2a5955","Type":"ContainerStarted","Data":"26b26fbbc884afcde1007b6d2a8c33b9e8e975f95edb93270fb03d630e251afa"}
Sep 30 12:35:27 crc kubenswrapper[5002]: I0930 12:35:27.563993 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"0fe7ac7f-c7a0-42a5-8089-059c05c40d3e","Type":"ContainerStarted","Data":"fcbc28edd5b28a6f6278ff654010ab87dba77027aacd53e6eba9deb13a7f08d2"}
Sep 30 12:35:27 crc kubenswrapper[5002]: I0930 12:35:27.565145 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"1e148ee1-66bc-4300-a27a-a8c4ce515d51","Type":"ContainerStarted","Data":"f531ece5aaa858acd13509f06b31486848e20f4659b48e9f20e8329ee9b87845"}
Sep 30 12:35:27 crc kubenswrapper[5002]: I0930 12:35:27.566327 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"f80ae063-860a-4997-9c9f-57bc3a850e37","Type":"ContainerStarted","Data":"3eaace2f9b0aed40df6a26c28fe5f12235e819b65e60e1d4aa62569ea4beb30a"}
Sep 30 12:35:27 crc kubenswrapper[5002]: I0930 12:35:27.568178 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-qq5zn" event={"ID":"6b699340-4bbd-4df4-951b-9404b0545d24","Type":"ContainerStarted","Data":"d9e76e865c58e0f3e12e9d04dfd964dfea60cf65764f306a3c245bb3bbb3ebe3"}
Sep 30 12:35:27 crc kubenswrapper[5002]: I0930 12:35:27.569854 5002 generic.go:334] "Generic (PLEG): container finished" podID="dde3e2f9-15fd-4f59-8570-02fa37c770e0" containerID="a8d151e1994f5fc5f79282f383a2f88900d2d9c7a4e1c149a05246a5d6fc4fda" exitCode=0
Sep 30 12:35:27 crc kubenswrapper[5002]: I0930 12:35:27.569913 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5ccc8479f9-dkhh2" event={"ID":"dde3e2f9-15fd-4f59-8570-02fa37c770e0","Type":"ContainerDied","Data":"a8d151e1994f5fc5f79282f383a2f88900d2d9c7a4e1c149a05246a5d6fc4fda"}
Sep 30 12:35:27 crc kubenswrapper[5002]: I0930 12:35:27.572977 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-675f4bcbfc-gpdm5" event={"ID":"9e945411-e51f-48af-ac88-de0fff99c16e","Type":"ContainerDied","Data":"cb036afd39ec1a862a110f81d0a42c4cd8e825156b761693bf3bf725b037d4ff"}
Sep 30 12:35:27 crc kubenswrapper[5002]: I0930 12:35:27.573034 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-gpdm5"
Sep 30 12:35:27 crc kubenswrapper[5002]: I0930 12:35:27.593213 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"9d5d9337-f90f-4576-94dc-805d4e653801","Type":"ContainerStarted","Data":"c9286f73e0239e859318646d4204cb88265b509f4088e305cfe4915d6b5b6097"}
Sep 30 12:35:27 crc kubenswrapper[5002]: I0930 12:35:27.596490 5002 generic.go:334] "Generic (PLEG): container finished" podID="f69d7010-1d73-410f-a5f1-5aed01494f42" containerID="98bf594c4f007832b8dda391a700a183219a5fe942ba5f33059cbfd1f1fb7476" exitCode=0
Sep 30 12:35:27 crc kubenswrapper[5002]: I0930 12:35:27.596585 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-mp9f9" event={"ID":"f69d7010-1d73-410f-a5f1-5aed01494f42","Type":"ContainerDied","Data":"98bf594c4f007832b8dda391a700a183219a5fe942ba5f33059cbfd1f1fb7476"}
Sep 30 12:35:27 crc kubenswrapper[5002]: I0930 12:35:27.599160 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"26e7b906-c14a-4084-926c-2d2c7ce201be","Type":"ContainerStarted","Data":"39fbc7a9975fe4800644205c409f29e430ad3430463dfd9a207d9fad892af902"}
Sep 30 12:35:27 crc kubenswrapper[5002]: I0930 12:35:27.605105 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-78dd6ddcc-t7dh5" event={"ID":"386f8b04-affa-45f6-b394-f4f83cda6cb7","Type":"ContainerDied","Data":"391f4446362fde8f5b95b1845b96f8642f1c0102eef38640f9f67b2907046869"}
Sep 30 12:35:27 crc kubenswrapper[5002]: I0930 12:35:27.605175 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-t7dh5"
Sep 30 12:35:27 crc kubenswrapper[5002]: I0930 12:35:27.712446 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-t7dh5"]
Sep 30 12:35:27 crc kubenswrapper[5002]: I0930 12:35:27.748586 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-t7dh5"]
Sep 30 12:35:27 crc kubenswrapper[5002]: I0930 12:35:27.757058 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-gpdm5"]
Sep 30 12:35:27 crc kubenswrapper[5002]: I0930 12:35:27.767785 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-gpdm5"]
Sep 30 12:35:27 crc kubenswrapper[5002]: I0930 12:35:27.936074 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"]
Sep 30 12:35:27 crc kubenswrapper[5002]: W0930 12:35:27.941568 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7304f60c_c750_409f_bca4_4fd12c239891.slice/crio-4398c428112b7f8cbd4d81bf5afb09cbb9b1d1fe7556b1baaa48aa9130b6b997 WatchSource:0}: Error finding container 4398c428112b7f8cbd4d81bf5afb09cbb9b1d1fe7556b1baaa48aa9130b6b997: Status 404 returned error can't find the container with id 4398c428112b7f8cbd4d81bf5afb09cbb9b1d1fe7556b1baaa48aa9130b6b997
Sep 30 12:35:28 crc kubenswrapper[5002]: I0930 12:35:28.227892 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-5m55z"]
Sep 30 12:35:28 crc kubenswrapper[5002]: W0930 12:35:28.386369 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9d2f8d72_e223_4f96_917e_7b47baba18d8.slice/crio-97046990bf686cb8923b152756bd5e138ee09b5c5a89155d2ca0ed1cb61d6b04 WatchSource:0}: Error finding container 97046990bf686cb8923b152756bd5e138ee09b5c5a89155d2ca0ed1cb61d6b04: Status 404 returned error can't find the container with id 97046990bf686cb8923b152756bd5e138ee09b5c5a89155d2ca0ed1cb61d6b04
Sep 30 12:35:28 crc kubenswrapper[5002]: I0930 12:35:28.619412 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-mp9f9" event={"ID":"f69d7010-1d73-410f-a5f1-5aed01494f42","Type":"ContainerStarted","Data":"d9cf2bddda2789cf2be3774ea5ebd0a70923ddce060ecb4e763f30496bf86416"}
Sep 30 12:35:28 crc kubenswrapper[5002]: I0930 12:35:28.619509 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-57d769cc4f-mp9f9"
Sep 30 12:35:28 crc kubenswrapper[5002]: I0930 12:35:28.623140 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-5m55z" event={"ID":"9d2f8d72-e223-4f96-917e-7b47baba18d8","Type":"ContainerStarted","Data":"97046990bf686cb8923b152756bd5e138ee09b5c5a89155d2ca0ed1cb61d6b04"}
Sep 30 12:35:28 crc kubenswrapper[5002]: I0930 12:35:28.625288 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5ccc8479f9-dkhh2" event={"ID":"dde3e2f9-15fd-4f59-8570-02fa37c770e0","Type":"ContainerStarted","Data":"bfdbb06f81d12c88d63cfbc035bd90a2d897376cc35923a1ca6b89e9b588d6e5"}
Sep 30 12:35:28 crc kubenswrapper[5002]: I0930 12:35:28.625415 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5ccc8479f9-dkhh2"
Sep 30 12:35:28 crc kubenswrapper[5002]: I0930 12:35:28.626434 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"7304f60c-c750-409f-bca4-4fd12c239891","Type":"ContainerStarted","Data":"4398c428112b7f8cbd4d81bf5afb09cbb9b1d1fe7556b1baaa48aa9130b6b997"}
Sep 30 12:35:28 crc kubenswrapper[5002]: I0930 12:35:28.641878 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-57d769cc4f-mp9f9" podStartSLOduration=6.208050325 podStartE2EDuration="15.641857572s" podCreationTimestamp="2025-09-30 12:35:13 +0000 UTC" firstStartedPulling="2025-09-30 12:35:16.80603286 +0000 UTC m=+891.055715006" lastFinishedPulling="2025-09-30 12:35:26.239840107 +0000 UTC m=+900.489522253" observedRunningTime="2025-09-30 12:35:28.64109167 +0000 UTC m=+902.890773826" watchObservedRunningTime="2025-09-30 12:35:28.641857572 +0000 UTC m=+902.891539718"
Sep 30 12:35:28 crc kubenswrapper[5002]: I0930 12:35:28.661749 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5ccc8479f9-dkhh2" podStartSLOduration=6.195885841 podStartE2EDuration="15.661730955s" podCreationTimestamp="2025-09-30 12:35:13 +0000 UTC" firstStartedPulling="2025-09-30 12:35:16.805730332 +0000 UTC m=+891.055412478" lastFinishedPulling="2025-09-30 12:35:26.271575446 +0000 UTC m=+900.521257592" observedRunningTime="2025-09-30 12:35:28.657625243 +0000 UTC m=+902.907307409" watchObservedRunningTime="2025-09-30 12:35:28.661730955 +0000 UTC m=+902.911413091"
Sep 30 12:35:28 crc kubenswrapper[5002]: I0930 12:35:28.689717 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="386f8b04-affa-45f6-b394-f4f83cda6cb7" path="/var/lib/kubelet/pods/386f8b04-affa-45f6-b394-f4f83cda6cb7/volumes"
Sep 30 12:35:28 crc kubenswrapper[5002]: I0930 12:35:28.690368 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9e945411-e51f-48af-ac88-de0fff99c16e" path="/var/lib/kubelet/pods/9e945411-e51f-48af-ac88-de0fff99c16e/volumes"
Sep 30 12:35:30 crc kubenswrapper[5002]: I0930 12:35:30.050813 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-metrics-qztz9"]
Sep 30 12:35:30 crc kubenswrapper[5002]: I0930 12:35:30.056941 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-metrics-qztz9"
Sep 30 12:35:30 crc kubenswrapper[5002]: I0930 12:35:30.059166 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-metrics-config"
Sep 30 12:35:30 crc kubenswrapper[5002]: I0930 12:35:30.064258 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-qztz9"]
Sep 30 12:35:30 crc kubenswrapper[5002]: I0930 12:35:30.137731 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1a0f9c34-465f-4f75-af75-71b2e2d3722a-config\") pod \"ovn-controller-metrics-qztz9\" (UID: \"1a0f9c34-465f-4f75-af75-71b2e2d3722a\") " pod="openstack/ovn-controller-metrics-qztz9"
Sep 30 12:35:30 crc kubenswrapper[5002]: I0930 12:35:30.138015 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/1a0f9c34-465f-4f75-af75-71b2e2d3722a-ovn-rundir\") pod \"ovn-controller-metrics-qztz9\" (UID: \"1a0f9c34-465f-4f75-af75-71b2e2d3722a\") " pod="openstack/ovn-controller-metrics-qztz9"
Sep 30 12:35:30 crc kubenswrapper[5002]: I0930 12:35:30.138039 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1a0f9c34-465f-4f75-af75-71b2e2d3722a-combined-ca-bundle\") pod \"ovn-controller-metrics-qztz9\" (UID: \"1a0f9c34-465f-4f75-af75-71b2e2d3722a\") " pod="openstack/ovn-controller-metrics-qztz9"
Sep 30 12:35:30 crc kubenswrapper[5002]: I0930 12:35:30.138072 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/1a0f9c34-465f-4f75-af75-71b2e2d3722a-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-qztz9\" (UID: \"1a0f9c34-465f-4f75-af75-71b2e2d3722a\") " pod="openstack/ovn-controller-metrics-qztz9"
Sep 30 12:35:30 crc kubenswrapper[5002]: I0930 12:35:30.138093 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-whjb6\" (UniqueName: \"kubernetes.io/projected/1a0f9c34-465f-4f75-af75-71b2e2d3722a-kube-api-access-whjb6\") pod \"ovn-controller-metrics-qztz9\" (UID: \"1a0f9c34-465f-4f75-af75-71b2e2d3722a\") " pod="openstack/ovn-controller-metrics-qztz9"
Sep 30 12:35:30 crc kubenswrapper[5002]: I0930 12:35:30.138915 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/1a0f9c34-465f-4f75-af75-71b2e2d3722a-ovs-rundir\") pod \"ovn-controller-metrics-qztz9\" (UID: \"1a0f9c34-465f-4f75-af75-71b2e2d3722a\") " pod="openstack/ovn-controller-metrics-qztz9"
Sep 30 12:35:30 crc kubenswrapper[5002]: I0930 12:35:30.174241 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-mp9f9"]
Sep 30 12:35:30 crc kubenswrapper[5002]: I0930 12:35:30.195625 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-7fd796d7df-dkh6b"]
Sep 30 12:35:30 crc kubenswrapper[5002]: I0930 12:35:30.220846 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7fd796d7df-dkh6b"
Sep 30 12:35:30 crc kubenswrapper[5002]: I0930 12:35:30.220761 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7fd796d7df-dkh6b"]
Sep 30 12:35:30 crc kubenswrapper[5002]: I0930 12:35:30.225883 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-nb"
Sep 30 12:35:30 crc kubenswrapper[5002]: I0930 12:35:30.240903 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/1a0f9c34-465f-4f75-af75-71b2e2d3722a-ovn-rundir\") pod \"ovn-controller-metrics-qztz9\" (UID: \"1a0f9c34-465f-4f75-af75-71b2e2d3722a\") " pod="openstack/ovn-controller-metrics-qztz9"
Sep 30 12:35:30 crc kubenswrapper[5002]: I0930 12:35:30.241050 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1a0f9c34-465f-4f75-af75-71b2e2d3722a-combined-ca-bundle\") pod \"ovn-controller-metrics-qztz9\" (UID: \"1a0f9c34-465f-4f75-af75-71b2e2d3722a\") " pod="openstack/ovn-controller-metrics-qztz9"
Sep 30 12:35:30 crc kubenswrapper[5002]: I0930 12:35:30.241234 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/1a0f9c34-465f-4f75-af75-71b2e2d3722a-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-qztz9\" (UID: \"1a0f9c34-465f-4f75-af75-71b2e2d3722a\") " pod="openstack/ovn-controller-metrics-qztz9"
Sep 30 12:35:30 crc kubenswrapper[5002]: I0930 12:35:30.241331 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-whjb6\" (UniqueName: \"kubernetes.io/projected/1a0f9c34-465f-4f75-af75-71b2e2d3722a-kube-api-access-whjb6\") pod \"ovn-controller-metrics-qztz9\" (UID: \"1a0f9c34-465f-4f75-af75-71b2e2d3722a\") " pod="openstack/ovn-controller-metrics-qztz9"
Sep 30 12:35:30 crc kubenswrapper[5002]: I0930 12:35:30.241496 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/1a0f9c34-465f-4f75-af75-71b2e2d3722a-ovs-rundir\") pod \"ovn-controller-metrics-qztz9\" (UID: \"1a0f9c34-465f-4f75-af75-71b2e2d3722a\") " pod="openstack/ovn-controller-metrics-qztz9"
Sep 30 12:35:30 crc kubenswrapper[5002]: I0930 12:35:30.241624 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1a0f9c34-465f-4f75-af75-71b2e2d3722a-config\") pod \"ovn-controller-metrics-qztz9\" (UID: \"1a0f9c34-465f-4f75-af75-71b2e2d3722a\") " pod="openstack/ovn-controller-metrics-qztz9"
Sep 30 12:35:30 crc kubenswrapper[5002]: I0930 12:35:30.242268 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/1a0f9c34-465f-4f75-af75-71b2e2d3722a-ovs-rundir\") pod \"ovn-controller-metrics-qztz9\" (UID: \"1a0f9c34-465f-4f75-af75-71b2e2d3722a\") " pod="openstack/ovn-controller-metrics-qztz9"
Sep 30 12:35:30 crc kubenswrapper[5002]: I0930 12:35:30.242454 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1a0f9c34-465f-4f75-af75-71b2e2d3722a-config\") pod \"ovn-controller-metrics-qztz9\" (UID: \"1a0f9c34-465f-4f75-af75-71b2e2d3722a\") " pod="openstack/ovn-controller-metrics-qztz9"
Sep 30 12:35:30 crc kubenswrapper[5002]: I0930 12:35:30.244404 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/1a0f9c34-465f-4f75-af75-71b2e2d3722a-ovn-rundir\") pod \"ovn-controller-metrics-qztz9\" (UID: \"1a0f9c34-465f-4f75-af75-71b2e2d3722a\") " pod="openstack/ovn-controller-metrics-qztz9"
Sep 30 12:35:30 crc kubenswrapper[5002]: I0930 12:35:30.248294 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1a0f9c34-465f-4f75-af75-71b2e2d3722a-combined-ca-bundle\") pod \"ovn-controller-metrics-qztz9\" (UID: \"1a0f9c34-465f-4f75-af75-71b2e2d3722a\") " pod="openstack/ovn-controller-metrics-qztz9"
Sep 30 12:35:30 crc kubenswrapper[5002]: I0930 12:35:30.250801 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/1a0f9c34-465f-4f75-af75-71b2e2d3722a-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-qztz9\" (UID: \"1a0f9c34-465f-4f75-af75-71b2e2d3722a\") " pod="openstack/ovn-controller-metrics-qztz9"
Sep 30 12:35:30 crc kubenswrapper[5002]: I0930 12:35:30.264431 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-whjb6\" (UniqueName: \"kubernetes.io/projected/1a0f9c34-465f-4f75-af75-71b2e2d3722a-kube-api-access-whjb6\") pod \"ovn-controller-metrics-qztz9\" (UID: \"1a0f9c34-465f-4f75-af75-71b2e2d3722a\") " pod="openstack/ovn-controller-metrics-qztz9"
Sep 30 12:35:30 crc kubenswrapper[5002]: I0930 12:35:30.333164 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5ccc8479f9-dkhh2"]
Sep 30 12:35:30 crc kubenswrapper[5002]: I0930 12:35:30.342752 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/cf7fb635-0923-43a8-b156-2a8ffc8ebd64-dns-svc\") pod \"dnsmasq-dns-7fd796d7df-dkh6b\" (UID: \"cf7fb635-0923-43a8-b156-2a8ffc8ebd64\") " pod="openstack/dnsmasq-dns-7fd796d7df-dkh6b"
Sep 30 12:35:30 crc kubenswrapper[5002]: I0930 12:35:30.342799 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cf7fb635-0923-43a8-b156-2a8ffc8ebd64-config\") pod \"dnsmasq-dns-7fd796d7df-dkh6b\" (UID: \"cf7fb635-0923-43a8-b156-2a8ffc8ebd64\") " pod="openstack/dnsmasq-dns-7fd796d7df-dkh6b"
Sep 30 12:35:30 crc kubenswrapper[5002]: I0930 12:35:30.342819 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rz2rl\" (UniqueName: \"kubernetes.io/projected/cf7fb635-0923-43a8-b156-2a8ffc8ebd64-kube-api-access-rz2rl\") pod \"dnsmasq-dns-7fd796d7df-dkh6b\" (UID: \"cf7fb635-0923-43a8-b156-2a8ffc8ebd64\") " pod="openstack/dnsmasq-dns-7fd796d7df-dkh6b"
Sep 30 12:35:30 crc kubenswrapper[5002]: I0930 12:35:30.342871 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/cf7fb635-0923-43a8-b156-2a8ffc8ebd64-ovsdbserver-nb\") pod \"dnsmasq-dns-7fd796d7df-dkh6b\" (UID: \"cf7fb635-0923-43a8-b156-2a8ffc8ebd64\") " pod="openstack/dnsmasq-dns-7fd796d7df-dkh6b"
Sep 30 12:35:30 crc
kubenswrapper[5002]: I0930 12:35:30.363925 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-86db49b7ff-gxkxn"] Sep 30 12:35:30 crc kubenswrapper[5002]: I0930 12:35:30.365240 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-86db49b7ff-gxkxn" Sep 30 12:35:30 crc kubenswrapper[5002]: I0930 12:35:30.367342 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-sb" Sep 30 12:35:30 crc kubenswrapper[5002]: I0930 12:35:30.383001 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-metrics-qztz9" Sep 30 12:35:30 crc kubenswrapper[5002]: I0930 12:35:30.384940 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-86db49b7ff-gxkxn"] Sep 30 12:35:30 crc kubenswrapper[5002]: I0930 12:35:30.444105 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/05a428f5-0352-4fca-af1a-a6bcd2fb227b-ovsdbserver-sb\") pod \"dnsmasq-dns-86db49b7ff-gxkxn\" (UID: \"05a428f5-0352-4fca-af1a-a6bcd2fb227b\") " pod="openstack/dnsmasq-dns-86db49b7ff-gxkxn" Sep 30 12:35:30 crc kubenswrapper[5002]: I0930 12:35:30.444156 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/05a428f5-0352-4fca-af1a-a6bcd2fb227b-dns-svc\") pod \"dnsmasq-dns-86db49b7ff-gxkxn\" (UID: \"05a428f5-0352-4fca-af1a-a6bcd2fb227b\") " pod="openstack/dnsmasq-dns-86db49b7ff-gxkxn" Sep 30 12:35:30 crc kubenswrapper[5002]: I0930 12:35:30.444191 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/cf7fb635-0923-43a8-b156-2a8ffc8ebd64-ovsdbserver-nb\") pod \"dnsmasq-dns-7fd796d7df-dkh6b\" (UID: \"cf7fb635-0923-43a8-b156-2a8ffc8ebd64\") " pod="openstack/dnsmasq-dns-7fd796d7df-dkh6b" Sep 30 12:35:30 crc kubenswrapper[5002]: I0930 12:35:30.444263 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/05a428f5-0352-4fca-af1a-a6bcd2fb227b-config\") pod \"dnsmasq-dns-86db49b7ff-gxkxn\" (UID: \"05a428f5-0352-4fca-af1a-a6bcd2fb227b\") " pod="openstack/dnsmasq-dns-86db49b7ff-gxkxn" Sep 30 12:35:30 crc kubenswrapper[5002]: I0930 12:35:30.444293 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/cf7fb635-0923-43a8-b156-2a8ffc8ebd64-dns-svc\") pod \"dnsmasq-dns-7fd796d7df-dkh6b\" (UID: \"cf7fb635-0923-43a8-b156-2a8ffc8ebd64\") " pod="openstack/dnsmasq-dns-7fd796d7df-dkh6b" Sep 30 12:35:30 crc kubenswrapper[5002]: I0930 12:35:30.444312 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/05a428f5-0352-4fca-af1a-a6bcd2fb227b-ovsdbserver-nb\") pod \"dnsmasq-dns-86db49b7ff-gxkxn\" (UID: \"05a428f5-0352-4fca-af1a-a6bcd2fb227b\") " pod="openstack/dnsmasq-dns-86db49b7ff-gxkxn" Sep 30 12:35:30 crc kubenswrapper[5002]: I0930 12:35:30.444339 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cf7fb635-0923-43a8-b156-2a8ffc8ebd64-config\") pod \"dnsmasq-dns-7fd796d7df-dkh6b\" (UID: \"cf7fb635-0923-43a8-b156-2a8ffc8ebd64\") " 
pod="openstack/dnsmasq-dns-7fd796d7df-dkh6b" Sep 30 12:35:30 crc kubenswrapper[5002]: I0930 12:35:30.444360 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rz2rl\" (UniqueName: \"kubernetes.io/projected/cf7fb635-0923-43a8-b156-2a8ffc8ebd64-kube-api-access-rz2rl\") pod \"dnsmasq-dns-7fd796d7df-dkh6b\" (UID: \"cf7fb635-0923-43a8-b156-2a8ffc8ebd64\") " pod="openstack/dnsmasq-dns-7fd796d7df-dkh6b" Sep 30 12:35:30 crc kubenswrapper[5002]: I0930 12:35:30.444385 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ff4ng\" (UniqueName: \"kubernetes.io/projected/05a428f5-0352-4fca-af1a-a6bcd2fb227b-kube-api-access-ff4ng\") pod \"dnsmasq-dns-86db49b7ff-gxkxn\" (UID: \"05a428f5-0352-4fca-af1a-a6bcd2fb227b\") " pod="openstack/dnsmasq-dns-86db49b7ff-gxkxn" Sep 30 12:35:30 crc kubenswrapper[5002]: I0930 12:35:30.445417 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/cf7fb635-0923-43a8-b156-2a8ffc8ebd64-ovsdbserver-nb\") pod \"dnsmasq-dns-7fd796d7df-dkh6b\" (UID: \"cf7fb635-0923-43a8-b156-2a8ffc8ebd64\") " pod="openstack/dnsmasq-dns-7fd796d7df-dkh6b" Sep 30 12:35:30 crc kubenswrapper[5002]: I0930 12:35:30.445483 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/cf7fb635-0923-43a8-b156-2a8ffc8ebd64-dns-svc\") pod \"dnsmasq-dns-7fd796d7df-dkh6b\" (UID: \"cf7fb635-0923-43a8-b156-2a8ffc8ebd64\") " pod="openstack/dnsmasq-dns-7fd796d7df-dkh6b" Sep 30 12:35:30 crc kubenswrapper[5002]: I0930 12:35:30.445582 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cf7fb635-0923-43a8-b156-2a8ffc8ebd64-config\") pod \"dnsmasq-dns-7fd796d7df-dkh6b\" (UID: \"cf7fb635-0923-43a8-b156-2a8ffc8ebd64\") " pod="openstack/dnsmasq-dns-7fd796d7df-dkh6b" Sep 30 12:35:30 crc kubenswrapper[5002]: I0930 12:35:30.463393 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rz2rl\" (UniqueName: \"kubernetes.io/projected/cf7fb635-0923-43a8-b156-2a8ffc8ebd64-kube-api-access-rz2rl\") pod \"dnsmasq-dns-7fd796d7df-dkh6b\" (UID: \"cf7fb635-0923-43a8-b156-2a8ffc8ebd64\") " pod="openstack/dnsmasq-dns-7fd796d7df-dkh6b" Sep 30 12:35:30 crc kubenswrapper[5002]: I0930 12:35:30.546446 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/05a428f5-0352-4fca-af1a-a6bcd2fb227b-config\") pod \"dnsmasq-dns-86db49b7ff-gxkxn\" (UID: \"05a428f5-0352-4fca-af1a-a6bcd2fb227b\") " pod="openstack/dnsmasq-dns-86db49b7ff-gxkxn" Sep 30 12:35:30 crc kubenswrapper[5002]: I0930 12:35:30.546547 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/05a428f5-0352-4fca-af1a-a6bcd2fb227b-ovsdbserver-nb\") pod \"dnsmasq-dns-86db49b7ff-gxkxn\" (UID: \"05a428f5-0352-4fca-af1a-a6bcd2fb227b\") " pod="openstack/dnsmasq-dns-86db49b7ff-gxkxn" Sep 30 12:35:30 crc kubenswrapper[5002]: I0930 12:35:30.546601 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ff4ng\" (UniqueName: \"kubernetes.io/projected/05a428f5-0352-4fca-af1a-a6bcd2fb227b-kube-api-access-ff4ng\") pod \"dnsmasq-dns-86db49b7ff-gxkxn\" (UID: \"05a428f5-0352-4fca-af1a-a6bcd2fb227b\") " pod="openstack/dnsmasq-dns-86db49b7ff-gxkxn" Sep 30 
12:35:30 crc kubenswrapper[5002]: I0930 12:35:30.546667 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/05a428f5-0352-4fca-af1a-a6bcd2fb227b-ovsdbserver-sb\") pod \"dnsmasq-dns-86db49b7ff-gxkxn\" (UID: \"05a428f5-0352-4fca-af1a-a6bcd2fb227b\") " pod="openstack/dnsmasq-dns-86db49b7ff-gxkxn" Sep 30 12:35:30 crc kubenswrapper[5002]: I0930 12:35:30.546696 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/05a428f5-0352-4fca-af1a-a6bcd2fb227b-dns-svc\") pod \"dnsmasq-dns-86db49b7ff-gxkxn\" (UID: \"05a428f5-0352-4fca-af1a-a6bcd2fb227b\") " pod="openstack/dnsmasq-dns-86db49b7ff-gxkxn" Sep 30 12:35:30 crc kubenswrapper[5002]: I0930 12:35:30.547328 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/05a428f5-0352-4fca-af1a-a6bcd2fb227b-config\") pod \"dnsmasq-dns-86db49b7ff-gxkxn\" (UID: \"05a428f5-0352-4fca-af1a-a6bcd2fb227b\") " pod="openstack/dnsmasq-dns-86db49b7ff-gxkxn" Sep 30 12:35:30 crc kubenswrapper[5002]: I0930 12:35:30.547727 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/05a428f5-0352-4fca-af1a-a6bcd2fb227b-dns-svc\") pod \"dnsmasq-dns-86db49b7ff-gxkxn\" (UID: \"05a428f5-0352-4fca-af1a-a6bcd2fb227b\") " pod="openstack/dnsmasq-dns-86db49b7ff-gxkxn" Sep 30 12:35:30 crc kubenswrapper[5002]: I0930 12:35:30.548446 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/05a428f5-0352-4fca-af1a-a6bcd2fb227b-ovsdbserver-sb\") pod \"dnsmasq-dns-86db49b7ff-gxkxn\" (UID: \"05a428f5-0352-4fca-af1a-a6bcd2fb227b\") " pod="openstack/dnsmasq-dns-86db49b7ff-gxkxn" Sep 30 12:35:30 crc kubenswrapper[5002]: I0930 12:35:30.548609 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/05a428f5-0352-4fca-af1a-a6bcd2fb227b-ovsdbserver-nb\") pod \"dnsmasq-dns-86db49b7ff-gxkxn\" (UID: \"05a428f5-0352-4fca-af1a-a6bcd2fb227b\") " pod="openstack/dnsmasq-dns-86db49b7ff-gxkxn" Sep 30 12:35:30 crc kubenswrapper[5002]: I0930 12:35:30.552169 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7fd796d7df-dkh6b" Sep 30 12:35:30 crc kubenswrapper[5002]: I0930 12:35:30.563061 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ff4ng\" (UniqueName: \"kubernetes.io/projected/05a428f5-0352-4fca-af1a-a6bcd2fb227b-kube-api-access-ff4ng\") pod \"dnsmasq-dns-86db49b7ff-gxkxn\" (UID: \"05a428f5-0352-4fca-af1a-a6bcd2fb227b\") " pod="openstack/dnsmasq-dns-86db49b7ff-gxkxn" Sep 30 12:35:30 crc kubenswrapper[5002]: I0930 12:35:30.642231 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-57d769cc4f-mp9f9" podUID="f69d7010-1d73-410f-a5f1-5aed01494f42" containerName="dnsmasq-dns" containerID="cri-o://d9cf2bddda2789cf2be3774ea5ebd0a70923ddce060ecb4e763f30496bf86416" gracePeriod=10 Sep 30 12:35:30 crc kubenswrapper[5002]: I0930 12:35:30.642394 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-5ccc8479f9-dkhh2" podUID="dde3e2f9-15fd-4f59-8570-02fa37c770e0" containerName="dnsmasq-dns" containerID="cri-o://bfdbb06f81d12c88d63cfbc035bd90a2d897376cc35923a1ca6b89e9b588d6e5" gracePeriod=10 Sep 30 12:35:30 crc kubenswrapper[5002]: I0930 12:35:30.685323 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-86db49b7ff-gxkxn" Sep 30 12:35:31 crc kubenswrapper[5002]: I0930 12:35:31.663004 5002 generic.go:334] "Generic (PLEG): container finished" podID="dde3e2f9-15fd-4f59-8570-02fa37c770e0" containerID="bfdbb06f81d12c88d63cfbc035bd90a2d897376cc35923a1ca6b89e9b588d6e5" exitCode=0 Sep 30 12:35:31 crc kubenswrapper[5002]: I0930 12:35:31.663666 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5ccc8479f9-dkhh2" event={"ID":"dde3e2f9-15fd-4f59-8570-02fa37c770e0","Type":"ContainerDied","Data":"bfdbb06f81d12c88d63cfbc035bd90a2d897376cc35923a1ca6b89e9b588d6e5"} Sep 30 12:35:31 crc kubenswrapper[5002]: I0930 12:35:31.666012 5002 generic.go:334] "Generic (PLEG): container finished" podID="f69d7010-1d73-410f-a5f1-5aed01494f42" containerID="d9cf2bddda2789cf2be3774ea5ebd0a70923ddce060ecb4e763f30496bf86416" exitCode=0 Sep 30 12:35:31 crc kubenswrapper[5002]: I0930 12:35:31.666060 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-mp9f9" event={"ID":"f69d7010-1d73-410f-a5f1-5aed01494f42","Type":"ContainerDied","Data":"d9cf2bddda2789cf2be3774ea5ebd0a70923ddce060ecb4e763f30496bf86416"} Sep 30 12:35:32 crc kubenswrapper[5002]: I0930 12:35:32.097951 5002 patch_prober.go:28] interesting pod/machine-config-daemon-ncbb5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 12:35:32 crc kubenswrapper[5002]: I0930 12:35:32.098017 5002 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" podUID="341a55c6-78d3-4fa2-8f47-b56fd41fa1c1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 12:35:34 crc kubenswrapper[5002]: I0930 12:35:34.353067 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5ccc8479f9-dkhh2" Sep 30 12:35:34 crc kubenswrapper[5002]: I0930 12:35:34.361860 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-mp9f9" Sep 30 12:35:34 crc kubenswrapper[5002]: I0930 12:35:34.404434 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6pgcs\" (UniqueName: \"kubernetes.io/projected/dde3e2f9-15fd-4f59-8570-02fa37c770e0-kube-api-access-6pgcs\") pod \"dde3e2f9-15fd-4f59-8570-02fa37c770e0\" (UID: \"dde3e2f9-15fd-4f59-8570-02fa37c770e0\") " Sep 30 12:35:34 crc kubenswrapper[5002]: I0930 12:35:34.404571 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f69d7010-1d73-410f-a5f1-5aed01494f42-dns-svc\") pod \"f69d7010-1d73-410f-a5f1-5aed01494f42\" (UID: \"f69d7010-1d73-410f-a5f1-5aed01494f42\") " Sep 30 12:35:34 crc kubenswrapper[5002]: I0930 12:35:34.404603 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/dde3e2f9-15fd-4f59-8570-02fa37c770e0-dns-svc\") pod \"dde3e2f9-15fd-4f59-8570-02fa37c770e0\" (UID: \"dde3e2f9-15fd-4f59-8570-02fa37c770e0\") " Sep 30 12:35:34 crc kubenswrapper[5002]: I0930 12:35:34.404744 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jwl62\" (UniqueName: \"kubernetes.io/projected/f69d7010-1d73-410f-a5f1-5aed01494f42-kube-api-access-jwl62\") pod \"f69d7010-1d73-410f-a5f1-5aed01494f42\" (UID: \"f69d7010-1d73-410f-a5f1-5aed01494f42\") " Sep 30 12:35:34 crc kubenswrapper[5002]: I0930 12:35:34.404777 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dde3e2f9-15fd-4f59-8570-02fa37c770e0-config\") pod \"dde3e2f9-15fd-4f59-8570-02fa37c770e0\" (UID: \"dde3e2f9-15fd-4f59-8570-02fa37c770e0\") " Sep 30 12:35:34 crc kubenswrapper[5002]: I0930 12:35:34.404806 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f69d7010-1d73-410f-a5f1-5aed01494f42-config\") pod \"f69d7010-1d73-410f-a5f1-5aed01494f42\" (UID: \"f69d7010-1d73-410f-a5f1-5aed01494f42\") " Sep 30 12:35:34 crc kubenswrapper[5002]: I0930 12:35:34.411238 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f69d7010-1d73-410f-a5f1-5aed01494f42-kube-api-access-jwl62" (OuterVolumeSpecName: "kube-api-access-jwl62") pod "f69d7010-1d73-410f-a5f1-5aed01494f42" (UID: "f69d7010-1d73-410f-a5f1-5aed01494f42"). InnerVolumeSpecName "kube-api-access-jwl62". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 12:35:34 crc kubenswrapper[5002]: I0930 12:35:34.411293 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dde3e2f9-15fd-4f59-8570-02fa37c770e0-kube-api-access-6pgcs" (OuterVolumeSpecName: "kube-api-access-6pgcs") pod "dde3e2f9-15fd-4f59-8570-02fa37c770e0" (UID: "dde3e2f9-15fd-4f59-8570-02fa37c770e0"). InnerVolumeSpecName "kube-api-access-6pgcs". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 12:35:34 crc kubenswrapper[5002]: I0930 12:35:34.447213 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f69d7010-1d73-410f-a5f1-5aed01494f42-config" (OuterVolumeSpecName: "config") pod "f69d7010-1d73-410f-a5f1-5aed01494f42" (UID: "f69d7010-1d73-410f-a5f1-5aed01494f42"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 12:35:34 crc kubenswrapper[5002]: I0930 12:35:34.459142 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/dde3e2f9-15fd-4f59-8570-02fa37c770e0-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "dde3e2f9-15fd-4f59-8570-02fa37c770e0" (UID: "dde3e2f9-15fd-4f59-8570-02fa37c770e0"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 12:35:34 crc kubenswrapper[5002]: I0930 12:35:34.465865 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f69d7010-1d73-410f-a5f1-5aed01494f42-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "f69d7010-1d73-410f-a5f1-5aed01494f42" (UID: "f69d7010-1d73-410f-a5f1-5aed01494f42"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 12:35:34 crc kubenswrapper[5002]: I0930 12:35:34.468375 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/dde3e2f9-15fd-4f59-8570-02fa37c770e0-config" (OuterVolumeSpecName: "config") pod "dde3e2f9-15fd-4f59-8570-02fa37c770e0" (UID: "dde3e2f9-15fd-4f59-8570-02fa37c770e0"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 12:35:34 crc kubenswrapper[5002]: I0930 12:35:34.506975 5002 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f69d7010-1d73-410f-a5f1-5aed01494f42-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 30 12:35:34 crc kubenswrapper[5002]: I0930 12:35:34.507012 5002 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/dde3e2f9-15fd-4f59-8570-02fa37c770e0-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 30 12:35:34 crc kubenswrapper[5002]: I0930 12:35:34.507025 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jwl62\" (UniqueName: \"kubernetes.io/projected/f69d7010-1d73-410f-a5f1-5aed01494f42-kube-api-access-jwl62\") on node \"crc\" DevicePath \"\"" Sep 30 12:35:34 crc kubenswrapper[5002]: I0930 12:35:34.507049 5002 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dde3e2f9-15fd-4f59-8570-02fa37c770e0-config\") on node \"crc\" DevicePath \"\"" Sep 30 12:35:34 crc kubenswrapper[5002]: I0930 12:35:34.507060 5002 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f69d7010-1d73-410f-a5f1-5aed01494f42-config\") on node \"crc\" DevicePath \"\"" Sep 30 12:35:34 crc kubenswrapper[5002]: I0930 12:35:34.507079 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6pgcs\" (UniqueName: \"kubernetes.io/projected/dde3e2f9-15fd-4f59-8570-02fa37c770e0-kube-api-access-6pgcs\") on node \"crc\" DevicePath \"\"" Sep 30 12:35:34 crc kubenswrapper[5002]: I0930 12:35:34.697758 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5ccc8479f9-dkhh2" 
event={"ID":"dde3e2f9-15fd-4f59-8570-02fa37c770e0","Type":"ContainerDied","Data":"105800949bc2720c1bf784ca1b9056066c1774e8a88a336dbcb2c9adbe0b9f8d"} Sep 30 12:35:34 crc kubenswrapper[5002]: I0930 12:35:34.697786 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5ccc8479f9-dkhh2" Sep 30 12:35:34 crc kubenswrapper[5002]: I0930 12:35:34.697814 5002 scope.go:117] "RemoveContainer" containerID="bfdbb06f81d12c88d63cfbc035bd90a2d897376cc35923a1ca6b89e9b588d6e5" Sep 30 12:35:34 crc kubenswrapper[5002]: I0930 12:35:34.703429 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-mp9f9" event={"ID":"f69d7010-1d73-410f-a5f1-5aed01494f42","Type":"ContainerDied","Data":"beedc2c2bb93ac57e391c26246b9c667ce4dc4c25fa05455227ab0ff2dabeaca"} Sep 30 12:35:34 crc kubenswrapper[5002]: I0930 12:35:34.703525 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-mp9f9" Sep 30 12:35:34 crc kubenswrapper[5002]: I0930 12:35:34.739376 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5ccc8479f9-dkhh2"] Sep 30 12:35:34 crc kubenswrapper[5002]: I0930 12:35:34.746682 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5ccc8479f9-dkhh2"] Sep 30 12:35:34 crc kubenswrapper[5002]: I0930 12:35:34.752646 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-mp9f9"] Sep 30 12:35:34 crc kubenswrapper[5002]: I0930 12:35:34.758716 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-mp9f9"] Sep 30 12:35:35 crc kubenswrapper[5002]: I0930 12:35:35.655864 5002 scope.go:117] "RemoveContainer" containerID="a8d151e1994f5fc5f79282f383a2f88900d2d9c7a4e1c149a05246a5d6fc4fda" Sep 30 12:35:35 crc kubenswrapper[5002]: I0930 12:35:35.754821 5002 scope.go:117] "RemoveContainer" containerID="d9cf2bddda2789cf2be3774ea5ebd0a70923ddce060ecb4e763f30496bf86416" Sep 30 12:35:35 crc kubenswrapper[5002]: I0930 12:35:35.899792 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-86db49b7ff-gxkxn"] Sep 30 12:35:35 crc kubenswrapper[5002]: W0930 12:35:35.919414 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod05a428f5_0352_4fca_af1a_a6bcd2fb227b.slice/crio-2023c13c6506b06bda09964c00b9b98cd4f34dffc44b54c472db40976be71b60 WatchSource:0}: Error finding container 2023c13c6506b06bda09964c00b9b98cd4f34dffc44b54c472db40976be71b60: Status 404 returned error can't find the container with id 2023c13c6506b06bda09964c00b9b98cd4f34dffc44b54c472db40976be71b60 Sep 30 12:35:35 crc kubenswrapper[5002]: I0930 12:35:35.925265 5002 scope.go:117] "RemoveContainer" containerID="98bf594c4f007832b8dda391a700a183219a5fe942ba5f33059cbfd1f1fb7476" Sep 30 12:35:36 crc kubenswrapper[5002]: I0930 12:35:36.107715 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-qztz9"] Sep 30 12:35:36 crc kubenswrapper[5002]: I0930 12:35:36.188744 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7fd796d7df-dkh6b"] Sep 30 12:35:36 crc kubenswrapper[5002]: W0930 12:35:36.424757 5002 manager.go:1169] Failed to process watch event {EventType:0 
Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podcf7fb635_0923_43a8_b156_2a8ffc8ebd64.slice/crio-a92af43ccf81b5230a8728f4dce90d3f92be7b01f7549e096a2fa287a4599806 WatchSource:0}: Error finding container a92af43ccf81b5230a8728f4dce90d3f92be7b01f7549e096a2fa287a4599806: Status 404 returned error can't find the container with id a92af43ccf81b5230a8728f4dce90d3f92be7b01f7549e096a2fa287a4599806 Sep 30 12:35:36 crc kubenswrapper[5002]: I0930 12:35:36.701996 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dde3e2f9-15fd-4f59-8570-02fa37c770e0" path="/var/lib/kubelet/pods/dde3e2f9-15fd-4f59-8570-02fa37c770e0/volumes" Sep 30 12:35:36 crc kubenswrapper[5002]: I0930 12:35:36.703172 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f69d7010-1d73-410f-a5f1-5aed01494f42" path="/var/lib/kubelet/pods/f69d7010-1d73-410f-a5f1-5aed01494f42/volumes" Sep 30 12:35:36 crc kubenswrapper[5002]: I0930 12:35:36.751202 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"7864c645-ac32-48bb-a292-0ff4ec2a5955","Type":"ContainerStarted","Data":"cd955a824c5414829b121191ff504da80bd36940d0ebfd3355f2fe56319b545d"} Sep 30 12:35:36 crc kubenswrapper[5002]: I0930 12:35:36.751696 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/memcached-0" Sep 30 12:35:36 crc kubenswrapper[5002]: I0930 12:35:36.756881 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7fd796d7df-dkh6b" event={"ID":"cf7fb635-0923-43a8-b156-2a8ffc8ebd64","Type":"ContainerStarted","Data":"a92af43ccf81b5230a8728f4dce90d3f92be7b01f7549e096a2fa287a4599806"} Sep 30 12:35:36 crc kubenswrapper[5002]: I0930 12:35:36.758016 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-qztz9" event={"ID":"1a0f9c34-465f-4f75-af75-71b2e2d3722a","Type":"ContainerStarted","Data":"f6a29147ed95b7b3d83371e13cfbe00eb1068c2ff9e03b42ef4ee9b52bf41ace"} Sep 30 12:35:36 crc kubenswrapper[5002]: I0930 12:35:36.761353 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-86db49b7ff-gxkxn" event={"ID":"05a428f5-0352-4fca-af1a-a6bcd2fb227b","Type":"ContainerStarted","Data":"2023c13c6506b06bda09964c00b9b98cd4f34dffc44b54c472db40976be71b60"} Sep 30 12:35:37 crc kubenswrapper[5002]: I0930 12:35:37.000246 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/memcached-0" podStartSLOduration=10.541563845 podStartE2EDuration="19.000223763s" podCreationTimestamp="2025-09-30 12:35:18 +0000 UTC" firstStartedPulling="2025-09-30 12:35:27.213387461 +0000 UTC m=+901.463069607" lastFinishedPulling="2025-09-30 12:35:35.672047349 +0000 UTC m=+909.921729525" observedRunningTime="2025-09-30 12:35:36.994542047 +0000 UTC m=+911.244224293" watchObservedRunningTime="2025-09-30 12:35:37.000223763 +0000 UTC m=+911.249905909" Sep 30 12:35:37 crc kubenswrapper[5002]: I0930 12:35:37.784364 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-qq5zn" event={"ID":"6b699340-4bbd-4df4-951b-9404b0545d24","Type":"ContainerStarted","Data":"d473f07cb7362d0799b3e8315adc96d90592bdcbf5cb5b059abdce6eac3c6df1"} Sep 30 12:35:37 crc kubenswrapper[5002]: I0930 12:35:37.784753 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-qq5zn" Sep 30 12:35:37 crc kubenswrapper[5002]: I0930 12:35:37.787233 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/openstack-cell1-galera-0" event={"ID":"e109d20d-1925-4779-bbff-50bd39214d34","Type":"ContainerStarted","Data":"34d352f5f74aef00b5574d1d5d2c433a1a407bc183b8f72b412b23aefdaa8276"} Sep 30 12:35:37 crc kubenswrapper[5002]: I0930 12:35:37.788856 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"0fe7ac7f-c7a0-42a5-8089-059c05c40d3e","Type":"ContainerStarted","Data":"8a2465dc4997baab7ca676ac6c30236214abccdb4bd8a14646dcde7c78dc5cfd"} Sep 30 12:35:37 crc kubenswrapper[5002]: I0930 12:35:37.789219 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/kube-state-metrics-0" Sep 30 12:35:37 crc kubenswrapper[5002]: I0930 12:35:37.790971 5002 generic.go:334] "Generic (PLEG): container finished" podID="9d2f8d72-e223-4f96-917e-7b47baba18d8" containerID="ed9b896c43b37f574908e613c28822f1ea7b2d508c72f62a337eebd2ffee2c67" exitCode=0 Sep 30 12:35:37 crc kubenswrapper[5002]: I0930 12:35:37.791010 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-5m55z" event={"ID":"9d2f8d72-e223-4f96-917e-7b47baba18d8","Type":"ContainerDied","Data":"ed9b896c43b37f574908e613c28822f1ea7b2d508c72f62a337eebd2ffee2c67"} Sep 30 12:35:37 crc kubenswrapper[5002]: I0930 12:35:37.793365 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"1e148ee1-66bc-4300-a27a-a8c4ce515d51","Type":"ContainerStarted","Data":"b1d032dd721ab71ede7608fc67c64638cea77519f9e9f2e11b6fac2ec580ba86"} Sep 30 12:35:37 crc kubenswrapper[5002]: I0930 12:35:37.795404 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"f80ae063-860a-4997-9c9f-57bc3a850e37","Type":"ContainerStarted","Data":"71b9c072672276657eaef2a61497bf03d34a5781bcf2885cdb55475c104f5798"} Sep 30 12:35:37 crc kubenswrapper[5002]: I0930 12:35:37.799655 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"26e7b906-c14a-4084-926c-2d2c7ce201be","Type":"ContainerStarted","Data":"572a006f4b69c44220f3b6f964c85c72e58db661b0e2164d4eeeae9deaff60cf"} Sep 30 12:35:37 crc kubenswrapper[5002]: I0930 12:35:37.803296 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"7304f60c-c750-409f-bca4-4fd12c239891","Type":"ContainerStarted","Data":"372847c104711baa7c6a0396efbe61493c53e6d7db78f997170f5f8f158502b9"} Sep 30 12:35:37 crc kubenswrapper[5002]: I0930 12:35:37.805018 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-qq5zn" podStartSLOduration=6.284618589 podStartE2EDuration="14.805005047s" podCreationTimestamp="2025-09-30 12:35:23 +0000 UTC" firstStartedPulling="2025-09-30 12:35:27.151718203 +0000 UTC m=+901.401400349" lastFinishedPulling="2025-09-30 12:35:35.672104661 +0000 UTC m=+909.921786807" observedRunningTime="2025-09-30 12:35:37.799843075 +0000 UTC m=+912.049525211" watchObservedRunningTime="2025-09-30 12:35:37.805005047 +0000 UTC m=+912.054687193" Sep 30 12:35:37 crc kubenswrapper[5002]: I0930 12:35:37.805263 5002 generic.go:334] "Generic (PLEG): container finished" podID="cf7fb635-0923-43a8-b156-2a8ffc8ebd64" containerID="450702435b5ab4b086fc2434d0ee4cb200a5f75e25300f697ac25f4ae5904abb" exitCode=0 Sep 30 12:35:37 crc kubenswrapper[5002]: I0930 12:35:37.805348 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7fd796d7df-dkh6b" 
event={"ID":"cf7fb635-0923-43a8-b156-2a8ffc8ebd64","Type":"ContainerDied","Data":"450702435b5ab4b086fc2434d0ee4cb200a5f75e25300f697ac25f4ae5904abb"} Sep 30 12:35:37 crc kubenswrapper[5002]: I0930 12:35:37.806889 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"9d5d9337-f90f-4576-94dc-805d4e653801","Type":"ContainerStarted","Data":"a5c55d61c792112007ad928cc3f14669731037ec86a2f601b627535947adc7ba"} Sep 30 12:35:37 crc kubenswrapper[5002]: I0930 12:35:37.808503 5002 generic.go:334] "Generic (PLEG): container finished" podID="05a428f5-0352-4fca-af1a-a6bcd2fb227b" containerID="2a192cd33f2b86990736c6feead1fd39975cb97e1c58aa0daa41025cc23ab0b0" exitCode=0 Sep 30 12:35:37 crc kubenswrapper[5002]: I0930 12:35:37.808540 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-86db49b7ff-gxkxn" event={"ID":"05a428f5-0352-4fca-af1a-a6bcd2fb227b","Type":"ContainerDied","Data":"2a192cd33f2b86990736c6feead1fd39975cb97e1c58aa0daa41025cc23ab0b0"} Sep 30 12:35:37 crc kubenswrapper[5002]: I0930 12:35:37.866979 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/kube-state-metrics-0" podStartSLOduration=9.341372394 podStartE2EDuration="18.866964203s" podCreationTimestamp="2025-09-30 12:35:19 +0000 UTC" firstStartedPulling="2025-09-30 12:35:27.141690608 +0000 UTC m=+901.391372754" lastFinishedPulling="2025-09-30 12:35:36.667282417 +0000 UTC m=+910.916964563" observedRunningTime="2025-09-30 12:35:37.860100025 +0000 UTC m=+912.109782171" watchObservedRunningTime="2025-09-30 12:35:37.866964203 +0000 UTC m=+912.116646349" Sep 30 12:35:38 crc kubenswrapper[5002]: I0930 12:35:38.810683 5002 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-5ccc8479f9-dkhh2" podUID="dde3e2f9-15fd-4f59-8570-02fa37c770e0" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.98:5353: i/o timeout" Sep 30 12:35:38 crc kubenswrapper[5002]: I0930 12:35:38.820888 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-86db49b7ff-gxkxn" event={"ID":"05a428f5-0352-4fca-af1a-a6bcd2fb227b","Type":"ContainerStarted","Data":"cbc411e792f3b04777aa53906752ba038c4b07bf2256fe5e9846b56560c7dfc1"} Sep 30 12:35:38 crc kubenswrapper[5002]: I0930 12:35:38.822039 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-86db49b7ff-gxkxn" Sep 30 12:35:38 crc kubenswrapper[5002]: I0930 12:35:38.825212 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-5m55z" event={"ID":"9d2f8d72-e223-4f96-917e-7b47baba18d8","Type":"ContainerStarted","Data":"8c1e133f6123f725fef7396b8dec90ae75a3f4f1e1bffc3e6ac8c52de49b09a1"} Sep 30 12:35:38 crc kubenswrapper[5002]: I0930 12:35:38.831012 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7fd796d7df-dkh6b" event={"ID":"cf7fb635-0923-43a8-b156-2a8ffc8ebd64","Type":"ContainerStarted","Data":"7bb058d336a5565c8c399692edff5168050a4a1414b761ed3d67763944ca7db3"} Sep 30 12:35:38 crc kubenswrapper[5002]: I0930 12:35:38.831084 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-7fd796d7df-dkh6b" Sep 30 12:35:38 crc kubenswrapper[5002]: I0930 12:35:38.869245 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-86db49b7ff-gxkxn" podStartSLOduration=8.869221594 podStartE2EDuration="8.869221594s" podCreationTimestamp="2025-09-30 12:35:30 +0000 UTC" 
firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 12:35:38.845681079 +0000 UTC m=+913.095363235" watchObservedRunningTime="2025-09-30 12:35:38.869221594 +0000 UTC m=+913.118903760" Sep 30 12:35:39 crc kubenswrapper[5002]: I0930 12:35:39.082158 5002 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-57d769cc4f-mp9f9" podUID="f69d7010-1d73-410f-a5f1-5aed01494f42" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.99:5353: i/o timeout" Sep 30 12:35:40 crc kubenswrapper[5002]: I0930 12:35:40.848285 5002 generic.go:334] "Generic (PLEG): container finished" podID="e109d20d-1925-4779-bbff-50bd39214d34" containerID="34d352f5f74aef00b5574d1d5d2c433a1a407bc183b8f72b412b23aefdaa8276" exitCode=0 Sep 30 12:35:40 crc kubenswrapper[5002]: I0930 12:35:40.848414 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"e109d20d-1925-4779-bbff-50bd39214d34","Type":"ContainerDied","Data":"34d352f5f74aef00b5574d1d5d2c433a1a407bc183b8f72b412b23aefdaa8276"} Sep 30 12:35:40 crc kubenswrapper[5002]: I0930 12:35:40.851519 5002 generic.go:334] "Generic (PLEG): container finished" podID="9d5d9337-f90f-4576-94dc-805d4e653801" containerID="a5c55d61c792112007ad928cc3f14669731037ec86a2f601b627535947adc7ba" exitCode=0 Sep 30 12:35:40 crc kubenswrapper[5002]: I0930 12:35:40.851611 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"9d5d9337-f90f-4576-94dc-805d4e653801","Type":"ContainerDied","Data":"a5c55d61c792112007ad928cc3f14669731037ec86a2f601b627535947adc7ba"} Sep 30 12:35:40 crc kubenswrapper[5002]: I0930 12:35:40.884925 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-7fd796d7df-dkh6b" podStartSLOduration=10.884902311 podStartE2EDuration="10.884902311s" podCreationTimestamp="2025-09-30 12:35:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 12:35:38.867976979 +0000 UTC m=+913.117659125" watchObservedRunningTime="2025-09-30 12:35:40.884902311 +0000 UTC m=+915.134584457" Sep 30 12:35:43 crc kubenswrapper[5002]: I0930 12:35:43.523400 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/memcached-0" Sep 30 12:35:43 crc kubenswrapper[5002]: I0930 12:35:43.878491 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-5m55z" event={"ID":"9d2f8d72-e223-4f96-917e-7b47baba18d8","Type":"ContainerStarted","Data":"61e494a904d6f3f18bb8f47ab1a4ec5e82d50ca20ad1aa31f988cea98070af0d"} Sep 30 12:35:43 crc kubenswrapper[5002]: I0930 12:35:43.881162 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"1e148ee1-66bc-4300-a27a-a8c4ce515d51","Type":"ContainerStarted","Data":"ff7f5b806ce0c0afe8ac82ed19f68ac43bf6ad53488572c836a18e9f9d777d40"} Sep 30 12:35:44 crc kubenswrapper[5002]: I0930 12:35:44.888306 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-qztz9" event={"ID":"1a0f9c34-465f-4f75-af75-71b2e2d3722a","Type":"ContainerStarted","Data":"8829d51f23d3aebc0e77ae1c01b98841b398026063b0e3785b7e859bc568b975"} Sep 30 12:35:45 crc kubenswrapper[5002]: I0930 12:35:45.553631 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-7fd796d7df-dkh6b" Sep 30 
12:35:45 crc kubenswrapper[5002]: I0930 12:35:45.687624 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-86db49b7ff-gxkxn" Sep 30 12:35:45 crc kubenswrapper[5002]: I0930 12:35:45.771606 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7fd796d7df-dkh6b"] Sep 30 12:35:45 crc kubenswrapper[5002]: I0930 12:35:45.896755 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"e109d20d-1925-4779-bbff-50bd39214d34","Type":"ContainerStarted","Data":"b3026df7911cf040c35bd53d03a0a3d6fb381ab48f885a6cef0cdcc432a1d2c0"} Sep 30 12:35:45 crc kubenswrapper[5002]: I0930 12:35:45.903795 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-7fd796d7df-dkh6b" podUID="cf7fb635-0923-43a8-b156-2a8ffc8ebd64" containerName="dnsmasq-dns" containerID="cri-o://7bb058d336a5565c8c399692edff5168050a4a1414b761ed3d67763944ca7db3" gracePeriod=10 Sep 30 12:35:45 crc kubenswrapper[5002]: I0930 12:35:45.904958 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"9d5d9337-f90f-4576-94dc-805d4e653801","Type":"ContainerStarted","Data":"afe56c172d8d9ff137000d6d0c55cab96be65c3c717d84ecba1360f913df96e5"} Sep 30 12:35:45 crc kubenswrapper[5002]: I0930 12:35:45.904985 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-5m55z" Sep 30 12:35:45 crc kubenswrapper[5002]: I0930 12:35:45.905000 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-5m55z" Sep 30 12:35:45 crc kubenswrapper[5002]: I0930 12:35:45.947611 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-nb-0" podStartSLOduration=9.80166778 podStartE2EDuration="21.947590521s" podCreationTimestamp="2025-09-30 12:35:24 +0000 UTC" firstStartedPulling="2025-09-30 12:35:27.352952072 +0000 UTC m=+901.602634218" lastFinishedPulling="2025-09-30 12:35:39.498874803 +0000 UTC m=+913.748556959" observedRunningTime="2025-09-30 12:35:45.944831456 +0000 UTC m=+920.194513612" watchObservedRunningTime="2025-09-30 12:35:45.947590521 +0000 UTC m=+920.197272667" Sep 30 12:35:45 crc kubenswrapper[5002]: I0930 12:35:45.948276 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-cell1-galera-0" podStartSLOduration=21.31529443 podStartE2EDuration="29.94827226s" podCreationTimestamp="2025-09-30 12:35:16 +0000 UTC" firstStartedPulling="2025-09-30 12:35:26.698013651 +0000 UTC m=+900.947695797" lastFinishedPulling="2025-09-30 12:35:35.330991441 +0000 UTC m=+909.580673627" observedRunningTime="2025-09-30 12:35:45.923280845 +0000 UTC m=+920.172963001" watchObservedRunningTime="2025-09-30 12:35:45.94827226 +0000 UTC m=+920.197954406" Sep 30 12:35:45 crc kubenswrapper[5002]: I0930 12:35:45.972676 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-galera-0" podStartSLOduration=21.436460946 podStartE2EDuration="29.972639676s" podCreationTimestamp="2025-09-30 12:35:16 +0000 UTC" firstStartedPulling="2025-09-30 12:35:27.206557584 +0000 UTC m=+901.456239730" lastFinishedPulling="2025-09-30 12:35:35.742736314 +0000 UTC m=+909.992418460" observedRunningTime="2025-09-30 12:35:45.96653523 +0000 UTC m=+920.216217386" watchObservedRunningTime="2025-09-30 12:35:45.972639676 +0000 UTC m=+920.222321822" Sep 30 12:35:46 crc kubenswrapper[5002]: I0930 
12:35:46.011083 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-ovs-5m55z" podStartSLOduration=15.75253201 podStartE2EDuration="23.011053949s" podCreationTimestamp="2025-09-30 12:35:23 +0000 UTC" firstStartedPulling="2025-09-30 12:35:28.389402029 +0000 UTC m=+902.639084175" lastFinishedPulling="2025-09-30 12:35:35.647923968 +0000 UTC m=+909.897606114" observedRunningTime="2025-09-30 12:35:46.008049707 +0000 UTC m=+920.257731853" watchObservedRunningTime="2025-09-30 12:35:46.011053949 +0000 UTC m=+920.260736095" Sep 30 12:35:46 crc kubenswrapper[5002]: I0930 12:35:46.014166 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-metrics-qztz9" podStartSLOduration=12.874248915999999 podStartE2EDuration="16.014159693s" podCreationTimestamp="2025-09-30 12:35:30 +0000 UTC" firstStartedPulling="2025-09-30 12:35:36.417807287 +0000 UTC m=+910.667489433" lastFinishedPulling="2025-09-30 12:35:39.557718054 +0000 UTC m=+913.807400210" observedRunningTime="2025-09-30 12:35:45.989510648 +0000 UTC m=+920.239192805" watchObservedRunningTime="2025-09-30 12:35:46.014159693 +0000 UTC m=+920.263841909" Sep 30 12:35:46 crc kubenswrapper[5002]: I0930 12:35:46.318708 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7fd796d7df-dkh6b" Sep 30 12:35:46 crc kubenswrapper[5002]: I0930 12:35:46.420892 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/cf7fb635-0923-43a8-b156-2a8ffc8ebd64-ovsdbserver-nb\") pod \"cf7fb635-0923-43a8-b156-2a8ffc8ebd64\" (UID: \"cf7fb635-0923-43a8-b156-2a8ffc8ebd64\") " Sep 30 12:35:46 crc kubenswrapper[5002]: I0930 12:35:46.420940 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cf7fb635-0923-43a8-b156-2a8ffc8ebd64-config\") pod \"cf7fb635-0923-43a8-b156-2a8ffc8ebd64\" (UID: \"cf7fb635-0923-43a8-b156-2a8ffc8ebd64\") " Sep 30 12:35:46 crc kubenswrapper[5002]: I0930 12:35:46.420991 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/cf7fb635-0923-43a8-b156-2a8ffc8ebd64-dns-svc\") pod \"cf7fb635-0923-43a8-b156-2a8ffc8ebd64\" (UID: \"cf7fb635-0923-43a8-b156-2a8ffc8ebd64\") " Sep 30 12:35:46 crc kubenswrapper[5002]: I0930 12:35:46.421031 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rz2rl\" (UniqueName: \"kubernetes.io/projected/cf7fb635-0923-43a8-b156-2a8ffc8ebd64-kube-api-access-rz2rl\") pod \"cf7fb635-0923-43a8-b156-2a8ffc8ebd64\" (UID: \"cf7fb635-0923-43a8-b156-2a8ffc8ebd64\") " Sep 30 12:35:46 crc kubenswrapper[5002]: I0930 12:35:46.427326 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cf7fb635-0923-43a8-b156-2a8ffc8ebd64-kube-api-access-rz2rl" (OuterVolumeSpecName: "kube-api-access-rz2rl") pod "cf7fb635-0923-43a8-b156-2a8ffc8ebd64" (UID: "cf7fb635-0923-43a8-b156-2a8ffc8ebd64"). InnerVolumeSpecName "kube-api-access-rz2rl". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 12:35:46 crc kubenswrapper[5002]: I0930 12:35:46.463384 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cf7fb635-0923-43a8-b156-2a8ffc8ebd64-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "cf7fb635-0923-43a8-b156-2a8ffc8ebd64" (UID: "cf7fb635-0923-43a8-b156-2a8ffc8ebd64"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 12:35:46 crc kubenswrapper[5002]: I0930 12:35:46.468526 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cf7fb635-0923-43a8-b156-2a8ffc8ebd64-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "cf7fb635-0923-43a8-b156-2a8ffc8ebd64" (UID: "cf7fb635-0923-43a8-b156-2a8ffc8ebd64"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 12:35:46 crc kubenswrapper[5002]: I0930 12:35:46.477163 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cf7fb635-0923-43a8-b156-2a8ffc8ebd64-config" (OuterVolumeSpecName: "config") pod "cf7fb635-0923-43a8-b156-2a8ffc8ebd64" (UID: "cf7fb635-0923-43a8-b156-2a8ffc8ebd64"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 12:35:46 crc kubenswrapper[5002]: I0930 12:35:46.522851 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rz2rl\" (UniqueName: \"kubernetes.io/projected/cf7fb635-0923-43a8-b156-2a8ffc8ebd64-kube-api-access-rz2rl\") on node \"crc\" DevicePath \"\"" Sep 30 12:35:46 crc kubenswrapper[5002]: I0930 12:35:46.522893 5002 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/cf7fb635-0923-43a8-b156-2a8ffc8ebd64-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Sep 30 12:35:46 crc kubenswrapper[5002]: I0930 12:35:46.522912 5002 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cf7fb635-0923-43a8-b156-2a8ffc8ebd64-config\") on node \"crc\" DevicePath \"\"" Sep 30 12:35:46 crc kubenswrapper[5002]: I0930 12:35:46.522926 5002 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/cf7fb635-0923-43a8-b156-2a8ffc8ebd64-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 30 12:35:46 crc kubenswrapper[5002]: I0930 12:35:46.713161 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-nb-0" Sep 30 12:35:46 crc kubenswrapper[5002]: I0930 12:35:46.757820 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-nb-0" Sep 30 12:35:46 crc kubenswrapper[5002]: I0930 12:35:46.916684 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"7304f60c-c750-409f-bca4-4fd12c239891","Type":"ContainerStarted","Data":"c6a080652371cc6955d3dfc46707e4cf8f8ce5d04a70e6d196840e29cba9e86a"} Sep 30 12:35:46 crc kubenswrapper[5002]: I0930 12:35:46.918925 5002 generic.go:334] "Generic (PLEG): container finished" podID="cf7fb635-0923-43a8-b156-2a8ffc8ebd64" containerID="7bb058d336a5565c8c399692edff5168050a4a1414b761ed3d67763944ca7db3" exitCode=0 Sep 30 12:35:46 crc kubenswrapper[5002]: I0930 12:35:46.919046 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7fd796d7df-dkh6b" Sep 30 12:35:46 crc kubenswrapper[5002]: I0930 12:35:46.919109 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7fd796d7df-dkh6b" event={"ID":"cf7fb635-0923-43a8-b156-2a8ffc8ebd64","Type":"ContainerDied","Data":"7bb058d336a5565c8c399692edff5168050a4a1414b761ed3d67763944ca7db3"} Sep 30 12:35:46 crc kubenswrapper[5002]: I0930 12:35:46.919151 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7fd796d7df-dkh6b" event={"ID":"cf7fb635-0923-43a8-b156-2a8ffc8ebd64","Type":"ContainerDied","Data":"a92af43ccf81b5230a8728f4dce90d3f92be7b01f7549e096a2fa287a4599806"} Sep 30 12:35:46 crc kubenswrapper[5002]: I0930 12:35:46.919177 5002 scope.go:117] "RemoveContainer" containerID="7bb058d336a5565c8c399692edff5168050a4a1414b761ed3d67763944ca7db3" Sep 30 12:35:46 crc kubenswrapper[5002]: I0930 12:35:46.919785 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-nb-0" Sep 30 12:35:46 crc kubenswrapper[5002]: I0930 12:35:46.948453 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-sb-0" podStartSLOduration=3.138486658 podStartE2EDuration="20.948431313s" podCreationTimestamp="2025-09-30 12:35:26 +0000 UTC" firstStartedPulling="2025-09-30 12:35:27.944963241 +0000 UTC m=+902.194645387" lastFinishedPulling="2025-09-30 12:35:45.754907886 +0000 UTC m=+920.004590042" observedRunningTime="2025-09-30 12:35:46.942512841 +0000 UTC m=+921.192195047" watchObservedRunningTime="2025-09-30 12:35:46.948431313 +0000 UTC m=+921.198113449" Sep 30 12:35:46 crc kubenswrapper[5002]: I0930 12:35:46.966375 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7fd796d7df-dkh6b"] Sep 30 12:35:46 crc kubenswrapper[5002]: I0930 12:35:46.970062 5002 scope.go:117] "RemoveContainer" containerID="450702435b5ab4b086fc2434d0ee4cb200a5f75e25300f697ac25f4ae5904abb" Sep 30 12:35:46 crc kubenswrapper[5002]: I0930 12:35:46.971741 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-7fd796d7df-dkh6b"] Sep 30 12:35:46 crc kubenswrapper[5002]: I0930 12:35:46.984975 5002 scope.go:117] "RemoveContainer" containerID="7bb058d336a5565c8c399692edff5168050a4a1414b761ed3d67763944ca7db3" Sep 30 12:35:46 crc kubenswrapper[5002]: E0930 12:35:46.988335 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7bb058d336a5565c8c399692edff5168050a4a1414b761ed3d67763944ca7db3\": container with ID starting with 7bb058d336a5565c8c399692edff5168050a4a1414b761ed3d67763944ca7db3 not found: ID does not exist" containerID="7bb058d336a5565c8c399692edff5168050a4a1414b761ed3d67763944ca7db3" Sep 30 12:35:46 crc kubenswrapper[5002]: I0930 12:35:46.988384 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7bb058d336a5565c8c399692edff5168050a4a1414b761ed3d67763944ca7db3"} err="failed to get container status \"7bb058d336a5565c8c399692edff5168050a4a1414b761ed3d67763944ca7db3\": rpc error: code = NotFound desc = could not find container \"7bb058d336a5565c8c399692edff5168050a4a1414b761ed3d67763944ca7db3\": container with ID starting with 7bb058d336a5565c8c399692edff5168050a4a1414b761ed3d67763944ca7db3 not found: ID does not exist" Sep 30 12:35:46 crc kubenswrapper[5002]: I0930 12:35:46.988411 5002 scope.go:117] "RemoveContainer" 
containerID="450702435b5ab4b086fc2434d0ee4cb200a5f75e25300f697ac25f4ae5904abb" Sep 30 12:35:46 crc kubenswrapper[5002]: E0930 12:35:46.988953 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"450702435b5ab4b086fc2434d0ee4cb200a5f75e25300f697ac25f4ae5904abb\": container with ID starting with 450702435b5ab4b086fc2434d0ee4cb200a5f75e25300f697ac25f4ae5904abb not found: ID does not exist" containerID="450702435b5ab4b086fc2434d0ee4cb200a5f75e25300f697ac25f4ae5904abb" Sep 30 12:35:46 crc kubenswrapper[5002]: I0930 12:35:46.988977 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"450702435b5ab4b086fc2434d0ee4cb200a5f75e25300f697ac25f4ae5904abb"} err="failed to get container status \"450702435b5ab4b086fc2434d0ee4cb200a5f75e25300f697ac25f4ae5904abb\": rpc error: code = NotFound desc = could not find container \"450702435b5ab4b086fc2434d0ee4cb200a5f75e25300f697ac25f4ae5904abb\": container with ID starting with 450702435b5ab4b086fc2434d0ee4cb200a5f75e25300f697ac25f4ae5904abb not found: ID does not exist" Sep 30 12:35:46 crc kubenswrapper[5002]: I0930 12:35:46.989981 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-nb-0" Sep 30 12:35:47 crc kubenswrapper[5002]: I0930 12:35:47.452208 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-sb-0" Sep 30 12:35:47 crc kubenswrapper[5002]: I0930 12:35:47.951252 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-galera-0" Sep 30 12:35:47 crc kubenswrapper[5002]: I0930 12:35:47.951429 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-galera-0" Sep 30 12:35:47 crc kubenswrapper[5002]: I0930 12:35:47.962628 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-cell1-galera-0" Sep 30 12:35:47 crc kubenswrapper[5002]: I0930 12:35:47.962710 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-cell1-galera-0" Sep 30 12:35:48 crc kubenswrapper[5002]: E0930 12:35:48.231581 5002 upgradeaware.go:441] Error proxying data from backend to client: writeto tcp 38.102.83.194:42336->38.102.83.194:40383: read tcp 38.102.83.194:42336->38.102.83.194:40383: read: connection reset by peer Sep 30 12:35:48 crc kubenswrapper[5002]: I0930 12:35:48.451752 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-sb-0" Sep 30 12:35:48 crc kubenswrapper[5002]: I0930 12:35:48.490585 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-sb-0" Sep 30 12:35:48 crc kubenswrapper[5002]: I0930 12:35:48.685545 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cf7fb635-0923-43a8-b156-2a8ffc8ebd64" path="/var/lib/kubelet/pods/cf7fb635-0923-43a8-b156-2a8ffc8ebd64/volumes" Sep 30 12:35:48 crc kubenswrapper[5002]: I0930 12:35:48.975239 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-sb-0" Sep 30 12:35:49 crc kubenswrapper[5002]: I0930 12:35:49.220361 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-northd-0"] Sep 30 12:35:49 crc kubenswrapper[5002]: E0930 12:35:49.220791 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dde3e2f9-15fd-4f59-8570-02fa37c770e0" containerName="init" Sep 30 12:35:49 
crc kubenswrapper[5002]: I0930 12:35:49.220816 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="dde3e2f9-15fd-4f59-8570-02fa37c770e0" containerName="init" Sep 30 12:35:49 crc kubenswrapper[5002]: E0930 12:35:49.220836 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f69d7010-1d73-410f-a5f1-5aed01494f42" containerName="dnsmasq-dns" Sep 30 12:35:49 crc kubenswrapper[5002]: I0930 12:35:49.220844 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="f69d7010-1d73-410f-a5f1-5aed01494f42" containerName="dnsmasq-dns" Sep 30 12:35:49 crc kubenswrapper[5002]: E0930 12:35:49.220853 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f69d7010-1d73-410f-a5f1-5aed01494f42" containerName="init" Sep 30 12:35:49 crc kubenswrapper[5002]: I0930 12:35:49.220861 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="f69d7010-1d73-410f-a5f1-5aed01494f42" containerName="init" Sep 30 12:35:49 crc kubenswrapper[5002]: E0930 12:35:49.220879 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cf7fb635-0923-43a8-b156-2a8ffc8ebd64" containerName="dnsmasq-dns" Sep 30 12:35:49 crc kubenswrapper[5002]: I0930 12:35:49.220886 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="cf7fb635-0923-43a8-b156-2a8ffc8ebd64" containerName="dnsmasq-dns" Sep 30 12:35:49 crc kubenswrapper[5002]: E0930 12:35:49.220909 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cf7fb635-0923-43a8-b156-2a8ffc8ebd64" containerName="init" Sep 30 12:35:49 crc kubenswrapper[5002]: I0930 12:35:49.220916 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="cf7fb635-0923-43a8-b156-2a8ffc8ebd64" containerName="init" Sep 30 12:35:49 crc kubenswrapper[5002]: E0930 12:35:49.220931 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dde3e2f9-15fd-4f59-8570-02fa37c770e0" containerName="dnsmasq-dns" Sep 30 12:35:49 crc kubenswrapper[5002]: I0930 12:35:49.220938 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="dde3e2f9-15fd-4f59-8570-02fa37c770e0" containerName="dnsmasq-dns" Sep 30 12:35:49 crc kubenswrapper[5002]: I0930 12:35:49.221140 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="cf7fb635-0923-43a8-b156-2a8ffc8ebd64" containerName="dnsmasq-dns" Sep 30 12:35:49 crc kubenswrapper[5002]: I0930 12:35:49.221158 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="f69d7010-1d73-410f-a5f1-5aed01494f42" containerName="dnsmasq-dns" Sep 30 12:35:49 crc kubenswrapper[5002]: I0930 12:35:49.221188 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="dde3e2f9-15fd-4f59-8570-02fa37c770e0" containerName="dnsmasq-dns" Sep 30 12:35:49 crc kubenswrapper[5002]: I0930 12:35:49.222091 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-northd-0" Sep 30 12:35:49 crc kubenswrapper[5002]: I0930 12:35:49.226460 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-scripts" Sep 30 12:35:49 crc kubenswrapper[5002]: I0930 12:35:49.226656 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-config" Sep 30 12:35:49 crc kubenswrapper[5002]: I0930 12:35:49.226840 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovnnorthd-ovndbs" Sep 30 12:35:49 crc kubenswrapper[5002]: I0930 12:35:49.226966 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovnnorthd-ovnnorthd-dockercfg-p8v5k" Sep 30 12:35:49 crc kubenswrapper[5002]: I0930 12:35:49.233274 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"] Sep 30 12:35:49 crc kubenswrapper[5002]: I0930 12:35:49.364551 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/39ee2def-85c4-4070-9392-1f4d9fc2139c-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"39ee2def-85c4-4070-9392-1f4d9fc2139c\") " pod="openstack/ovn-northd-0" Sep 30 12:35:49 crc kubenswrapper[5002]: I0930 12:35:49.364857 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dwmj2\" (UniqueName: \"kubernetes.io/projected/39ee2def-85c4-4070-9392-1f4d9fc2139c-kube-api-access-dwmj2\") pod \"ovn-northd-0\" (UID: \"39ee2def-85c4-4070-9392-1f4d9fc2139c\") " pod="openstack/ovn-northd-0" Sep 30 12:35:49 crc kubenswrapper[5002]: I0930 12:35:49.364907 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/39ee2def-85c4-4070-9392-1f4d9fc2139c-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"39ee2def-85c4-4070-9392-1f4d9fc2139c\") " pod="openstack/ovn-northd-0" Sep 30 12:35:49 crc kubenswrapper[5002]: I0930 12:35:49.364962 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/39ee2def-85c4-4070-9392-1f4d9fc2139c-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"39ee2def-85c4-4070-9392-1f4d9fc2139c\") " pod="openstack/ovn-northd-0" Sep 30 12:35:49 crc kubenswrapper[5002]: I0930 12:35:49.365009 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/39ee2def-85c4-4070-9392-1f4d9fc2139c-scripts\") pod \"ovn-northd-0\" (UID: \"39ee2def-85c4-4070-9392-1f4d9fc2139c\") " pod="openstack/ovn-northd-0" Sep 30 12:35:49 crc kubenswrapper[5002]: I0930 12:35:49.365056 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/39ee2def-85c4-4070-9392-1f4d9fc2139c-config\") pod \"ovn-northd-0\" (UID: \"39ee2def-85c4-4070-9392-1f4d9fc2139c\") " pod="openstack/ovn-northd-0" Sep 30 12:35:49 crc kubenswrapper[5002]: I0930 12:35:49.365075 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/39ee2def-85c4-4070-9392-1f4d9fc2139c-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"39ee2def-85c4-4070-9392-1f4d9fc2139c\") " pod="openstack/ovn-northd-0" Sep 30 12:35:49 crc kubenswrapper[5002]: 
I0930 12:35:49.466410 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/39ee2def-85c4-4070-9392-1f4d9fc2139c-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"39ee2def-85c4-4070-9392-1f4d9fc2139c\") " pod="openstack/ovn-northd-0" Sep 30 12:35:49 crc kubenswrapper[5002]: I0930 12:35:49.466451 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dwmj2\" (UniqueName: \"kubernetes.io/projected/39ee2def-85c4-4070-9392-1f4d9fc2139c-kube-api-access-dwmj2\") pod \"ovn-northd-0\" (UID: \"39ee2def-85c4-4070-9392-1f4d9fc2139c\") " pod="openstack/ovn-northd-0" Sep 30 12:35:49 crc kubenswrapper[5002]: I0930 12:35:49.466507 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/39ee2def-85c4-4070-9392-1f4d9fc2139c-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"39ee2def-85c4-4070-9392-1f4d9fc2139c\") " pod="openstack/ovn-northd-0" Sep 30 12:35:49 crc kubenswrapper[5002]: I0930 12:35:49.466541 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/39ee2def-85c4-4070-9392-1f4d9fc2139c-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"39ee2def-85c4-4070-9392-1f4d9fc2139c\") " pod="openstack/ovn-northd-0" Sep 30 12:35:49 crc kubenswrapper[5002]: I0930 12:35:49.466579 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/39ee2def-85c4-4070-9392-1f4d9fc2139c-scripts\") pod \"ovn-northd-0\" (UID: \"39ee2def-85c4-4070-9392-1f4d9fc2139c\") " pod="openstack/ovn-northd-0" Sep 30 12:35:49 crc kubenswrapper[5002]: I0930 12:35:49.466605 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/39ee2def-85c4-4070-9392-1f4d9fc2139c-config\") pod \"ovn-northd-0\" (UID: \"39ee2def-85c4-4070-9392-1f4d9fc2139c\") " pod="openstack/ovn-northd-0" Sep 30 12:35:49 crc kubenswrapper[5002]: I0930 12:35:49.466628 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/39ee2def-85c4-4070-9392-1f4d9fc2139c-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"39ee2def-85c4-4070-9392-1f4d9fc2139c\") " pod="openstack/ovn-northd-0" Sep 30 12:35:49 crc kubenswrapper[5002]: I0930 12:35:49.467022 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/39ee2def-85c4-4070-9392-1f4d9fc2139c-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"39ee2def-85c4-4070-9392-1f4d9fc2139c\") " pod="openstack/ovn-northd-0" Sep 30 12:35:49 crc kubenswrapper[5002]: I0930 12:35:49.467522 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/39ee2def-85c4-4070-9392-1f4d9fc2139c-config\") pod \"ovn-northd-0\" (UID: \"39ee2def-85c4-4070-9392-1f4d9fc2139c\") " pod="openstack/ovn-northd-0" Sep 30 12:35:49 crc kubenswrapper[5002]: I0930 12:35:49.468240 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/39ee2def-85c4-4070-9392-1f4d9fc2139c-scripts\") pod \"ovn-northd-0\" (UID: \"39ee2def-85c4-4070-9392-1f4d9fc2139c\") " pod="openstack/ovn-northd-0" Sep 30 12:35:49 crc kubenswrapper[5002]: I0930 12:35:49.471634 5002 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/39ee2def-85c4-4070-9392-1f4d9fc2139c-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"39ee2def-85c4-4070-9392-1f4d9fc2139c\") " pod="openstack/ovn-northd-0" Sep 30 12:35:49 crc kubenswrapper[5002]: I0930 12:35:49.471757 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/39ee2def-85c4-4070-9392-1f4d9fc2139c-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"39ee2def-85c4-4070-9392-1f4d9fc2139c\") " pod="openstack/ovn-northd-0" Sep 30 12:35:49 crc kubenswrapper[5002]: I0930 12:35:49.475061 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/39ee2def-85c4-4070-9392-1f4d9fc2139c-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"39ee2def-85c4-4070-9392-1f4d9fc2139c\") " pod="openstack/ovn-northd-0" Sep 30 12:35:49 crc kubenswrapper[5002]: I0930 12:35:49.491671 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dwmj2\" (UniqueName: \"kubernetes.io/projected/39ee2def-85c4-4070-9392-1f4d9fc2139c-kube-api-access-dwmj2\") pod \"ovn-northd-0\" (UID: \"39ee2def-85c4-4070-9392-1f4d9fc2139c\") " pod="openstack/ovn-northd-0" Sep 30 12:35:49 crc kubenswrapper[5002]: I0930 12:35:49.588428 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-northd-0" Sep 30 12:35:50 crc kubenswrapper[5002]: I0930 12:35:50.013511 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"] Sep 30 12:35:50 crc kubenswrapper[5002]: I0930 12:35:50.157998 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-cell1-galera-0" Sep 30 12:35:50 crc kubenswrapper[5002]: I0930 12:35:50.223931 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-cell1-galera-0" Sep 30 12:35:50 crc kubenswrapper[5002]: I0930 12:35:50.345212 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0" Sep 30 12:35:50 crc kubenswrapper[5002]: I0930 12:35:50.389507 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-698758b865-w59f2"] Sep 30 12:35:50 crc kubenswrapper[5002]: I0930 12:35:50.390700 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-698758b865-w59f2" Sep 30 12:35:50 crc kubenswrapper[5002]: I0930 12:35:50.407302 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-698758b865-w59f2"] Sep 30 12:35:50 crc kubenswrapper[5002]: I0930 12:35:50.488089 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hsl9g\" (UniqueName: \"kubernetes.io/projected/e48a90f9-2cc4-41fc-a164-301c62c34023-kube-api-access-hsl9g\") pod \"dnsmasq-dns-698758b865-w59f2\" (UID: \"e48a90f9-2cc4-41fc-a164-301c62c34023\") " pod="openstack/dnsmasq-dns-698758b865-w59f2" Sep 30 12:35:50 crc kubenswrapper[5002]: I0930 12:35:50.488162 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e48a90f9-2cc4-41fc-a164-301c62c34023-ovsdbserver-nb\") pod \"dnsmasq-dns-698758b865-w59f2\" (UID: \"e48a90f9-2cc4-41fc-a164-301c62c34023\") " pod="openstack/dnsmasq-dns-698758b865-w59f2" Sep 30 12:35:50 crc kubenswrapper[5002]: I0930 12:35:50.488207 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e48a90f9-2cc4-41fc-a164-301c62c34023-ovsdbserver-sb\") pod \"dnsmasq-dns-698758b865-w59f2\" (UID: \"e48a90f9-2cc4-41fc-a164-301c62c34023\") " pod="openstack/dnsmasq-dns-698758b865-w59f2" Sep 30 12:35:50 crc kubenswrapper[5002]: I0930 12:35:50.488230 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e48a90f9-2cc4-41fc-a164-301c62c34023-dns-svc\") pod \"dnsmasq-dns-698758b865-w59f2\" (UID: \"e48a90f9-2cc4-41fc-a164-301c62c34023\") " pod="openstack/dnsmasq-dns-698758b865-w59f2" Sep 30 12:35:50 crc kubenswrapper[5002]: I0930 12:35:50.488292 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e48a90f9-2cc4-41fc-a164-301c62c34023-config\") pod \"dnsmasq-dns-698758b865-w59f2\" (UID: \"e48a90f9-2cc4-41fc-a164-301c62c34023\") " pod="openstack/dnsmasq-dns-698758b865-w59f2" Sep 30 12:35:50 crc kubenswrapper[5002]: I0930 12:35:50.536441 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-galera-0" Sep 30 12:35:50 crc kubenswrapper[5002]: I0930 12:35:50.589757 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e48a90f9-2cc4-41fc-a164-301c62c34023-config\") pod \"dnsmasq-dns-698758b865-w59f2\" (UID: \"e48a90f9-2cc4-41fc-a164-301c62c34023\") " pod="openstack/dnsmasq-dns-698758b865-w59f2" Sep 30 12:35:50 crc kubenswrapper[5002]: I0930 12:35:50.589852 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hsl9g\" (UniqueName: \"kubernetes.io/projected/e48a90f9-2cc4-41fc-a164-301c62c34023-kube-api-access-hsl9g\") pod \"dnsmasq-dns-698758b865-w59f2\" (UID: \"e48a90f9-2cc4-41fc-a164-301c62c34023\") " pod="openstack/dnsmasq-dns-698758b865-w59f2" Sep 30 12:35:50 crc kubenswrapper[5002]: I0930 12:35:50.589889 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e48a90f9-2cc4-41fc-a164-301c62c34023-ovsdbserver-nb\") pod \"dnsmasq-dns-698758b865-w59f2\" (UID: \"e48a90f9-2cc4-41fc-a164-301c62c34023\") " 
pod="openstack/dnsmasq-dns-698758b865-w59f2" Sep 30 12:35:50 crc kubenswrapper[5002]: I0930 12:35:50.589923 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e48a90f9-2cc4-41fc-a164-301c62c34023-ovsdbserver-sb\") pod \"dnsmasq-dns-698758b865-w59f2\" (UID: \"e48a90f9-2cc4-41fc-a164-301c62c34023\") " pod="openstack/dnsmasq-dns-698758b865-w59f2" Sep 30 12:35:50 crc kubenswrapper[5002]: I0930 12:35:50.589938 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e48a90f9-2cc4-41fc-a164-301c62c34023-dns-svc\") pod \"dnsmasq-dns-698758b865-w59f2\" (UID: \"e48a90f9-2cc4-41fc-a164-301c62c34023\") " pod="openstack/dnsmasq-dns-698758b865-w59f2" Sep 30 12:35:50 crc kubenswrapper[5002]: I0930 12:35:50.590788 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e48a90f9-2cc4-41fc-a164-301c62c34023-dns-svc\") pod \"dnsmasq-dns-698758b865-w59f2\" (UID: \"e48a90f9-2cc4-41fc-a164-301c62c34023\") " pod="openstack/dnsmasq-dns-698758b865-w59f2" Sep 30 12:35:50 crc kubenswrapper[5002]: I0930 12:35:50.591281 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e48a90f9-2cc4-41fc-a164-301c62c34023-config\") pod \"dnsmasq-dns-698758b865-w59f2\" (UID: \"e48a90f9-2cc4-41fc-a164-301c62c34023\") " pod="openstack/dnsmasq-dns-698758b865-w59f2" Sep 30 12:35:50 crc kubenswrapper[5002]: I0930 12:35:50.595772 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e48a90f9-2cc4-41fc-a164-301c62c34023-ovsdbserver-nb\") pod \"dnsmasq-dns-698758b865-w59f2\" (UID: \"e48a90f9-2cc4-41fc-a164-301c62c34023\") " pod="openstack/dnsmasq-dns-698758b865-w59f2" Sep 30 12:35:50 crc kubenswrapper[5002]: I0930 12:35:50.596343 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e48a90f9-2cc4-41fc-a164-301c62c34023-ovsdbserver-sb\") pod \"dnsmasq-dns-698758b865-w59f2\" (UID: \"e48a90f9-2cc4-41fc-a164-301c62c34023\") " pod="openstack/dnsmasq-dns-698758b865-w59f2" Sep 30 12:35:50 crc kubenswrapper[5002]: I0930 12:35:50.650247 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hsl9g\" (UniqueName: \"kubernetes.io/projected/e48a90f9-2cc4-41fc-a164-301c62c34023-kube-api-access-hsl9g\") pod \"dnsmasq-dns-698758b865-w59f2\" (UID: \"e48a90f9-2cc4-41fc-a164-301c62c34023\") " pod="openstack/dnsmasq-dns-698758b865-w59f2" Sep 30 12:35:50 crc kubenswrapper[5002]: I0930 12:35:50.715082 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-galera-0" Sep 30 12:35:50 crc kubenswrapper[5002]: I0930 12:35:50.719883 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-698758b865-w59f2" Sep 30 12:35:50 crc kubenswrapper[5002]: I0930 12:35:50.949172 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"39ee2def-85c4-4070-9392-1f4d9fc2139c","Type":"ContainerStarted","Data":"09f70e567823f1fb4bb3c5890696a8700c49ce61130d62474e1f59a5b59217be"} Sep 30 12:35:51 crc kubenswrapper[5002]: I0930 12:35:51.196643 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-698758b865-w59f2"] Sep 30 12:35:51 crc kubenswrapper[5002]: I0930 12:35:51.489280 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-storage-0"] Sep 30 12:35:51 crc kubenswrapper[5002]: I0930 12:35:51.496311 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-storage-0" Sep 30 12:35:51 crc kubenswrapper[5002]: I0930 12:35:51.498419 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-conf" Sep 30 12:35:51 crc kubenswrapper[5002]: I0930 12:35:51.498766 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-swift-dockercfg-78czg" Sep 30 12:35:51 crc kubenswrapper[5002]: I0930 12:35:51.500531 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-storage-config-data" Sep 30 12:35:51 crc kubenswrapper[5002]: I0930 12:35:51.500713 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-files" Sep 30 12:35:51 crc kubenswrapper[5002]: I0930 12:35:51.527408 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-storage-0"] Sep 30 12:35:51 crc kubenswrapper[5002]: I0930 12:35:51.614850 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/b7a32bf0-614c-479f-986e-3f954c27ad1f-etc-swift\") pod \"swift-storage-0\" (UID: \"b7a32bf0-614c-479f-986e-3f954c27ad1f\") " pod="openstack/swift-storage-0" Sep 30 12:35:51 crc kubenswrapper[5002]: I0930 12:35:51.614991 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"swift-storage-0\" (UID: \"b7a32bf0-614c-479f-986e-3f954c27ad1f\") " pod="openstack/swift-storage-0" Sep 30 12:35:51 crc kubenswrapper[5002]: I0930 12:35:51.615016 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/b7a32bf0-614c-479f-986e-3f954c27ad1f-cache\") pod \"swift-storage-0\" (UID: \"b7a32bf0-614c-479f-986e-3f954c27ad1f\") " pod="openstack/swift-storage-0" Sep 30 12:35:51 crc kubenswrapper[5002]: I0930 12:35:51.615049 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2xfpn\" (UniqueName: \"kubernetes.io/projected/b7a32bf0-614c-479f-986e-3f954c27ad1f-kube-api-access-2xfpn\") pod \"swift-storage-0\" (UID: \"b7a32bf0-614c-479f-986e-3f954c27ad1f\") " pod="openstack/swift-storage-0" Sep 30 12:35:51 crc kubenswrapper[5002]: I0930 12:35:51.615106 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/b7a32bf0-614c-479f-986e-3f954c27ad1f-lock\") pod \"swift-storage-0\" (UID: \"b7a32bf0-614c-479f-986e-3f954c27ad1f\") " pod="openstack/swift-storage-0" Sep 30 12:35:51 crc 
Sep 30 12:35:51 crc kubenswrapper[5002]: I0930 12:35:51.716352 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/b7a32bf0-614c-479f-986e-3f954c27ad1f-etc-swift\") pod \"swift-storage-0\" (UID: \"b7a32bf0-614c-479f-986e-3f954c27ad1f\") " pod="openstack/swift-storage-0"
Sep 30 12:35:51 crc kubenswrapper[5002]: E0930 12:35:51.716615 5002 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found
Sep 30 12:35:51 crc kubenswrapper[5002]: E0930 12:35:51.716770 5002 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found
Sep 30 12:35:51 crc kubenswrapper[5002]: I0930 12:35:51.716791 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"swift-storage-0\" (UID: \"b7a32bf0-614c-479f-986e-3f954c27ad1f\") " pod="openstack/swift-storage-0"
Sep 30 12:35:51 crc kubenswrapper[5002]: I0930 12:35:51.716815 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/b7a32bf0-614c-479f-986e-3f954c27ad1f-cache\") pod \"swift-storage-0\" (UID: \"b7a32bf0-614c-479f-986e-3f954c27ad1f\") " pod="openstack/swift-storage-0"
Sep 30 12:35:51 crc kubenswrapper[5002]: E0930 12:35:51.716839 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/b7a32bf0-614c-479f-986e-3f954c27ad1f-etc-swift podName:b7a32bf0-614c-479f-986e-3f954c27ad1f nodeName:}" failed. No retries permitted until 2025-09-30 12:35:52.216815616 +0000 UTC m=+926.466497782 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/b7a32bf0-614c-479f-986e-3f954c27ad1f-etc-swift") pod "swift-storage-0" (UID: "b7a32bf0-614c-479f-986e-3f954c27ad1f") : configmap "swift-ring-files" not found
Sep 30 12:35:51 crc kubenswrapper[5002]: I0930 12:35:51.716854 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2xfpn\" (UniqueName: \"kubernetes.io/projected/b7a32bf0-614c-479f-986e-3f954c27ad1f-kube-api-access-2xfpn\") pod \"swift-storage-0\" (UID: \"b7a32bf0-614c-479f-986e-3f954c27ad1f\") " pod="openstack/swift-storage-0"
Sep 30 12:35:51 crc kubenswrapper[5002]: I0930 12:35:51.716913 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/b7a32bf0-614c-479f-986e-3f954c27ad1f-lock\") pod \"swift-storage-0\" (UID: \"b7a32bf0-614c-479f-986e-3f954c27ad1f\") " pod="openstack/swift-storage-0"
Sep 30 12:35:51 crc kubenswrapper[5002]: I0930 12:35:51.717195 5002 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"swift-storage-0\" (UID: \"b7a32bf0-614c-479f-986e-3f954c27ad1f\") device mount path \"/mnt/openstack/pv03\"" pod="openstack/swift-storage-0"
Sep 30 12:35:51 crc kubenswrapper[5002]: I0930 12:35:51.717423 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/b7a32bf0-614c-479f-986e-3f954c27ad1f-cache\") pod \"swift-storage-0\" (UID: \"b7a32bf0-614c-479f-986e-3f954c27ad1f\") " pod="openstack/swift-storage-0"
Sep 30 12:35:51 crc kubenswrapper[5002]: I0930 12:35:51.717457 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/b7a32bf0-614c-479f-986e-3f954c27ad1f-lock\") pod \"swift-storage-0\" (UID: \"b7a32bf0-614c-479f-986e-3f954c27ad1f\") " pod="openstack/swift-storage-0"
Sep 30 12:35:51 crc kubenswrapper[5002]: I0930 12:35:51.743505 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"swift-storage-0\" (UID: \"b7a32bf0-614c-479f-986e-3f954c27ad1f\") " pod="openstack/swift-storage-0"
Sep 30 12:35:51 crc kubenswrapper[5002]: I0930 12:35:51.745216 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2xfpn\" (UniqueName: \"kubernetes.io/projected/b7a32bf0-614c-479f-986e-3f954c27ad1f-kube-api-access-2xfpn\") pod \"swift-storage-0\" (UID: \"b7a32bf0-614c-479f-986e-3f954c27ad1f\") " pod="openstack/swift-storage-0"
Sep 30 12:35:51 crc kubenswrapper[5002]: I0930 12:35:51.770824 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-ring-rebalance-hrs89"]
Sep 30 12:35:51 crc kubenswrapper[5002]: I0930 12:35:51.772371 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-hrs89"
Sep 30 12:35:51 crc kubenswrapper[5002]: I0930 12:35:51.774614 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-scripts"
Sep 30 12:35:51 crc kubenswrapper[5002]: I0930 12:35:51.774801 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-config-data"
Sep 30 12:35:51 crc kubenswrapper[5002]: I0930 12:35:51.775013 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-proxy-config-data"
Sep 30 12:35:51 crc kubenswrapper[5002]: I0930 12:35:51.784505 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-ring-rebalance-hrs89"]
Sep 30 12:35:51 crc kubenswrapper[5002]: I0930 12:35:51.919455 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/8a1d9b02-1faf-4a01-82a1-d71e4c154f57-dispersionconf\") pod \"swift-ring-rebalance-hrs89\" (UID: \"8a1d9b02-1faf-4a01-82a1-d71e4c154f57\") " pod="openstack/swift-ring-rebalance-hrs89"
Sep 30 12:35:51 crc kubenswrapper[5002]: I0930 12:35:51.919711 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8a1d9b02-1faf-4a01-82a1-d71e4c154f57-combined-ca-bundle\") pod \"swift-ring-rebalance-hrs89\" (UID: \"8a1d9b02-1faf-4a01-82a1-d71e4c154f57\") " pod="openstack/swift-ring-rebalance-hrs89"
Sep 30 12:35:51 crc kubenswrapper[5002]: I0930 12:35:51.919785 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/8a1d9b02-1faf-4a01-82a1-d71e4c154f57-scripts\") pod \"swift-ring-rebalance-hrs89\" (UID: \"8a1d9b02-1faf-4a01-82a1-d71e4c154f57\") " pod="openstack/swift-ring-rebalance-hrs89"
Sep 30 12:35:51 crc kubenswrapper[5002]: I0930 12:35:51.919860 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/8a1d9b02-1faf-4a01-82a1-d71e4c154f57-etc-swift\") pod \"swift-ring-rebalance-hrs89\" (UID: \"8a1d9b02-1faf-4a01-82a1-d71e4c154f57\") " pod="openstack/swift-ring-rebalance-hrs89"
Sep 30 12:35:51 crc kubenswrapper[5002]: I0930 12:35:51.919924 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mg2qt\" (UniqueName: \"kubernetes.io/projected/8a1d9b02-1faf-4a01-82a1-d71e4c154f57-kube-api-access-mg2qt\") pod \"swift-ring-rebalance-hrs89\" (UID: \"8a1d9b02-1faf-4a01-82a1-d71e4c154f57\") " pod="openstack/swift-ring-rebalance-hrs89"
Sep 30 12:35:51 crc kubenswrapper[5002]: I0930 12:35:51.919990 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/8a1d9b02-1faf-4a01-82a1-d71e4c154f57-swiftconf\") pod \"swift-ring-rebalance-hrs89\" (UID: \"8a1d9b02-1faf-4a01-82a1-d71e4c154f57\") " pod="openstack/swift-ring-rebalance-hrs89"
Sep 30 12:35:51 crc kubenswrapper[5002]: I0930 12:35:51.920182 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/8a1d9b02-1faf-4a01-82a1-d71e4c154f57-ring-data-devices\") pod \"swift-ring-rebalance-hrs89\" (UID: \"8a1d9b02-1faf-4a01-82a1-d71e4c154f57\") " pod="openstack/swift-ring-rebalance-hrs89"
Sep 30 12:35:51 crc kubenswrapper[5002]: I0930 12:35:51.958607 5002 generic.go:334] "Generic (PLEG): container finished" podID="e48a90f9-2cc4-41fc-a164-301c62c34023" containerID="1dd428e29180bb044310dd229679d4a3dce7cfdab24e1d42117ed07943d20601" exitCode=0
Sep 30 12:35:51 crc kubenswrapper[5002]: I0930 12:35:51.958655 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-698758b865-w59f2" event={"ID":"e48a90f9-2cc4-41fc-a164-301c62c34023","Type":"ContainerDied","Data":"1dd428e29180bb044310dd229679d4a3dce7cfdab24e1d42117ed07943d20601"}
Sep 30 12:35:51 crc kubenswrapper[5002]: I0930 12:35:51.958922 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-698758b865-w59f2" event={"ID":"e48a90f9-2cc4-41fc-a164-301c62c34023","Type":"ContainerStarted","Data":"4c7cf39b6af990c1854b42c1835d3f914ff0d8b007a3b320c9689c2523f5acde"}
Sep 30 12:35:51 crc kubenswrapper[5002]: I0930 12:35:51.960595 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"39ee2def-85c4-4070-9392-1f4d9fc2139c","Type":"ContainerStarted","Data":"0b47f20c517db521e1b86c971a32e39a200e4de611061d6a0f9a21c8eb75cffc"}
Sep 30 12:35:51 crc kubenswrapper[5002]: I0930 12:35:51.960641 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"39ee2def-85c4-4070-9392-1f4d9fc2139c","Type":"ContainerStarted","Data":"22586cdd85cb0394dbdff71015d33ede6d1a2711aa652e16b0f0dc2eb24e7e11"}
Sep 30 12:35:51 crc kubenswrapper[5002]: I0930 12:35:51.960751 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-northd-0"
Sep 30 12:35:52 crc kubenswrapper[5002]: I0930 12:35:52.021773 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mg2qt\" (UniqueName: \"kubernetes.io/projected/8a1d9b02-1faf-4a01-82a1-d71e4c154f57-kube-api-access-mg2qt\") pod \"swift-ring-rebalance-hrs89\" (UID: \"8a1d9b02-1faf-4a01-82a1-d71e4c154f57\") " pod="openstack/swift-ring-rebalance-hrs89"
Sep 30 12:35:52 crc kubenswrapper[5002]: I0930 12:35:52.021832 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/8a1d9b02-1faf-4a01-82a1-d71e4c154f57-swiftconf\") pod \"swift-ring-rebalance-hrs89\" (UID: \"8a1d9b02-1faf-4a01-82a1-d71e4c154f57\") " pod="openstack/swift-ring-rebalance-hrs89"
Sep 30 12:35:52 crc kubenswrapper[5002]: I0930 12:35:52.021877 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/8a1d9b02-1faf-4a01-82a1-d71e4c154f57-ring-data-devices\") pod \"swift-ring-rebalance-hrs89\" (UID: \"8a1d9b02-1faf-4a01-82a1-d71e4c154f57\") " pod="openstack/swift-ring-rebalance-hrs89"
Sep 30 12:35:52 crc kubenswrapper[5002]: I0930 12:35:52.021931 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/8a1d9b02-1faf-4a01-82a1-d71e4c154f57-dispersionconf\") pod \"swift-ring-rebalance-hrs89\" (UID: \"8a1d9b02-1faf-4a01-82a1-d71e4c154f57\") " pod="openstack/swift-ring-rebalance-hrs89"
Sep 30 12:35:52 crc kubenswrapper[5002]: I0930 12:35:52.021976 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8a1d9b02-1faf-4a01-82a1-d71e4c154f57-combined-ca-bundle\") pod \"swift-ring-rebalance-hrs89\" (UID: \"8a1d9b02-1faf-4a01-82a1-d71e4c154f57\") " pod="openstack/swift-ring-rebalance-hrs89"
Sep 30 12:35:52 crc kubenswrapper[5002]: I0930 12:35:52.021991 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/8a1d9b02-1faf-4a01-82a1-d71e4c154f57-scripts\") pod \"swift-ring-rebalance-hrs89\" (UID: \"8a1d9b02-1faf-4a01-82a1-d71e4c154f57\") " pod="openstack/swift-ring-rebalance-hrs89"
Sep 30 12:35:52 crc kubenswrapper[5002]: I0930 12:35:52.022009 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/8a1d9b02-1faf-4a01-82a1-d71e4c154f57-etc-swift\") pod \"swift-ring-rebalance-hrs89\" (UID: \"8a1d9b02-1faf-4a01-82a1-d71e4c154f57\") " pod="openstack/swift-ring-rebalance-hrs89"
Sep 30 12:35:52 crc kubenswrapper[5002]: I0930 12:35:52.022820 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/8a1d9b02-1faf-4a01-82a1-d71e4c154f57-ring-data-devices\") pod \"swift-ring-rebalance-hrs89\" (UID: \"8a1d9b02-1faf-4a01-82a1-d71e4c154f57\") " pod="openstack/swift-ring-rebalance-hrs89"
Sep 30 12:35:52 crc kubenswrapper[5002]: I0930 12:35:52.022861 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/8a1d9b02-1faf-4a01-82a1-d71e4c154f57-scripts\") pod \"swift-ring-rebalance-hrs89\" (UID: \"8a1d9b02-1faf-4a01-82a1-d71e4c154f57\") " pod="openstack/swift-ring-rebalance-hrs89"
Sep 30 12:35:52 crc kubenswrapper[5002]: I0930 12:35:52.023061 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/8a1d9b02-1faf-4a01-82a1-d71e4c154f57-etc-swift\") pod \"swift-ring-rebalance-hrs89\" (UID: \"8a1d9b02-1faf-4a01-82a1-d71e4c154f57\") " pod="openstack/swift-ring-rebalance-hrs89"
Sep 30 12:35:52 crc kubenswrapper[5002]: I0930 12:35:52.026823 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-northd-0" podStartSLOduration=1.830149949 podStartE2EDuration="3.026797632s" podCreationTimestamp="2025-09-30 12:35:49 +0000 UTC" firstStartedPulling="2025-09-30 12:35:50.020098211 +0000 UTC m=+924.269780357" lastFinishedPulling="2025-09-30 12:35:51.216745874 +0000 UTC m=+925.466428040" observedRunningTime="2025-09-30 12:35:52.01283322 +0000 UTC m=+926.262515376" watchObservedRunningTime="2025-09-30 12:35:52.026797632 +0000 UTC m=+926.276479818"
Sep 30 12:35:52 crc kubenswrapper[5002]: I0930 12:35:52.033894 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/8a1d9b02-1faf-4a01-82a1-d71e4c154f57-swiftconf\") pod \"swift-ring-rebalance-hrs89\" (UID: \"8a1d9b02-1faf-4a01-82a1-d71e4c154f57\") " pod="openstack/swift-ring-rebalance-hrs89"
Sep 30 12:35:52 crc kubenswrapper[5002]: I0930 12:35:52.034305 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8a1d9b02-1faf-4a01-82a1-d71e4c154f57-combined-ca-bundle\") pod \"swift-ring-rebalance-hrs89\" (UID: \"8a1d9b02-1faf-4a01-82a1-d71e4c154f57\") " pod="openstack/swift-ring-rebalance-hrs89"
Sep 30 12:35:52 crc kubenswrapper[5002]: I0930 12:35:52.038127 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/8a1d9b02-1faf-4a01-82a1-d71e4c154f57-dispersionconf\") pod \"swift-ring-rebalance-hrs89\" (UID: \"8a1d9b02-1faf-4a01-82a1-d71e4c154f57\") " pod="openstack/swift-ring-rebalance-hrs89"
Sep 30 12:35:52 crc kubenswrapper[5002]: I0930 12:35:52.048603 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mg2qt\" (UniqueName: \"kubernetes.io/projected/8a1d9b02-1faf-4a01-82a1-d71e4c154f57-kube-api-access-mg2qt\") pod \"swift-ring-rebalance-hrs89\" (UID: \"8a1d9b02-1faf-4a01-82a1-d71e4c154f57\") " pod="openstack/swift-ring-rebalance-hrs89"
Sep 30 12:35:52 crc kubenswrapper[5002]: I0930 12:35:52.146453 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-hrs89"
Sep 30 12:35:52 crc kubenswrapper[5002]: I0930 12:35:52.227326 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/b7a32bf0-614c-479f-986e-3f954c27ad1f-etc-swift\") pod \"swift-storage-0\" (UID: \"b7a32bf0-614c-479f-986e-3f954c27ad1f\") " pod="openstack/swift-storage-0"
Sep 30 12:35:52 crc kubenswrapper[5002]: E0930 12:35:52.228087 5002 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found
Sep 30 12:35:52 crc kubenswrapper[5002]: E0930 12:35:52.228114 5002 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found
Sep 30 12:35:52 crc kubenswrapper[5002]: E0930 12:35:52.228185 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/b7a32bf0-614c-479f-986e-3f954c27ad1f-etc-swift podName:b7a32bf0-614c-479f-986e-3f954c27ad1f nodeName:}" failed. No retries permitted until 2025-09-30 12:35:53.228158275 +0000 UTC m=+927.477840451 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/b7a32bf0-614c-479f-986e-3f954c27ad1f-etc-swift") pod "swift-storage-0" (UID: "b7a32bf0-614c-479f-986e-3f954c27ad1f") : configmap "swift-ring-files" not found
Sep 30 12:35:52 crc kubenswrapper[5002]: I0930 12:35:52.619538 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-ring-rebalance-hrs89"]
Sep 30 12:35:52 crc kubenswrapper[5002]: W0930 12:35:52.627777 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8a1d9b02_1faf_4a01_82a1_d71e4c154f57.slice/crio-116eaa7a3cbdc31624aee7f536632dcfecf9c08b8af3ae69f53be3b12406f3c2 WatchSource:0}: Error finding container 116eaa7a3cbdc31624aee7f536632dcfecf9c08b8af3ae69f53be3b12406f3c2: Status 404 returned error can't find the container with id 116eaa7a3cbdc31624aee7f536632dcfecf9c08b8af3ae69f53be3b12406f3c2
Sep 30 12:35:52 crc kubenswrapper[5002]: I0930 12:35:52.971037 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-hrs89" event={"ID":"8a1d9b02-1faf-4a01-82a1-d71e4c154f57","Type":"ContainerStarted","Data":"116eaa7a3cbdc31624aee7f536632dcfecf9c08b8af3ae69f53be3b12406f3c2"}
Sep 30 12:35:52 crc kubenswrapper[5002]: I0930 12:35:52.973642 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-698758b865-w59f2" event={"ID":"e48a90f9-2cc4-41fc-a164-301c62c34023","Type":"ContainerStarted","Data":"59f35a48ce5079f5ab231cc92ca7cfe31fdbe753184bda03958a9823305efec5"}
Sep 30 12:35:52 crc kubenswrapper[5002]: I0930 12:35:52.995332 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-698758b865-w59f2" podStartSLOduration=2.995262848 podStartE2EDuration="2.995262848s" podCreationTimestamp="2025-09-30 12:35:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 12:35:52.992389809 +0000 UTC m=+927.242071985" watchObservedRunningTime="2025-09-30 12:35:52.995262848 +0000 UTC m=+927.244944994"
Sep 30 12:35:53 crc kubenswrapper[5002]: I0930 12:35:53.245113 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/b7a32bf0-614c-479f-986e-3f954c27ad1f-etc-swift\") pod \"swift-storage-0\" (UID: \"b7a32bf0-614c-479f-986e-3f954c27ad1f\") " pod="openstack/swift-storage-0"
Sep 30 12:35:53 crc kubenswrapper[5002]: E0930 12:35:53.245291 5002 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found
Sep 30 12:35:53 crc kubenswrapper[5002]: E0930 12:35:53.245324 5002 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found
Sep 30 12:35:53 crc kubenswrapper[5002]: E0930 12:35:53.245391 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/b7a32bf0-614c-479f-986e-3f954c27ad1f-etc-swift podName:b7a32bf0-614c-479f-986e-3f954c27ad1f nodeName:}" failed. No retries permitted until 2025-09-30 12:35:55.245371476 +0000 UTC m=+929.495053622 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/b7a32bf0-614c-479f-986e-3f954c27ad1f-etc-swift") pod "swift-storage-0" (UID: "b7a32bf0-614c-479f-986e-3f954c27ad1f") : configmap "swift-ring-files" not found
Sep 30 12:35:53 crc kubenswrapper[5002]: I0930 12:35:53.803894 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-create-6pbv2"]
Sep 30 12:35:53 crc kubenswrapper[5002]: I0930 12:35:53.825224 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-6pbv2"]
Need to start a new one" pod="openstack/glance-db-create-6pbv2" Sep 30 12:35:53 crc kubenswrapper[5002]: I0930 12:35:53.956229 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ndfhb\" (UniqueName: \"kubernetes.io/projected/6bc708e3-0747-40b1-a31e-33e23efd53cd-kube-api-access-ndfhb\") pod \"glance-db-create-6pbv2\" (UID: \"6bc708e3-0747-40b1-a31e-33e23efd53cd\") " pod="openstack/glance-db-create-6pbv2" Sep 30 12:35:53 crc kubenswrapper[5002]: I0930 12:35:53.984272 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-698758b865-w59f2" Sep 30 12:35:54 crc kubenswrapper[5002]: I0930 12:35:54.057994 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ndfhb\" (UniqueName: \"kubernetes.io/projected/6bc708e3-0747-40b1-a31e-33e23efd53cd-kube-api-access-ndfhb\") pod \"glance-db-create-6pbv2\" (UID: \"6bc708e3-0747-40b1-a31e-33e23efd53cd\") " pod="openstack/glance-db-create-6pbv2" Sep 30 12:35:54 crc kubenswrapper[5002]: I0930 12:35:54.102719 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ndfhb\" (UniqueName: \"kubernetes.io/projected/6bc708e3-0747-40b1-a31e-33e23efd53cd-kube-api-access-ndfhb\") pod \"glance-db-create-6pbv2\" (UID: \"6bc708e3-0747-40b1-a31e-33e23efd53cd\") " pod="openstack/glance-db-create-6pbv2" Sep 30 12:35:54 crc kubenswrapper[5002]: I0930 12:35:54.148294 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-6pbv2" Sep 30 12:35:55 crc kubenswrapper[5002]: I0930 12:35:55.282256 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/b7a32bf0-614c-479f-986e-3f954c27ad1f-etc-swift\") pod \"swift-storage-0\" (UID: \"b7a32bf0-614c-479f-986e-3f954c27ad1f\") " pod="openstack/swift-storage-0" Sep 30 12:35:55 crc kubenswrapper[5002]: E0930 12:35:55.282557 5002 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Sep 30 12:35:55 crc kubenswrapper[5002]: E0930 12:35:55.282744 5002 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Sep 30 12:35:55 crc kubenswrapper[5002]: E0930 12:35:55.282853 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/b7a32bf0-614c-479f-986e-3f954c27ad1f-etc-swift podName:b7a32bf0-614c-479f-986e-3f954c27ad1f nodeName:}" failed. No retries permitted until 2025-09-30 12:35:59.282817879 +0000 UTC m=+933.532500055 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/b7a32bf0-614c-479f-986e-3f954c27ad1f-etc-swift") pod "swift-storage-0" (UID: "b7a32bf0-614c-479f-986e-3f954c27ad1f") : configmap "swift-ring-files" not found Sep 30 12:35:56 crc kubenswrapper[5002]: I0930 12:35:56.234777 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-6pbv2"] Sep 30 12:35:56 crc kubenswrapper[5002]: W0930 12:35:56.238789 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6bc708e3_0747_40b1_a31e_33e23efd53cd.slice/crio-e15d6a01a84cc6408ea66d7cd26eeceab24ea2ba9d6b5d69383437aaf8e98f6e WatchSource:0}: Error finding container e15d6a01a84cc6408ea66d7cd26eeceab24ea2ba9d6b5d69383437aaf8e98f6e: Status 404 returned error can't find the container with id e15d6a01a84cc6408ea66d7cd26eeceab24ea2ba9d6b5d69383437aaf8e98f6e Sep 30 12:35:57 crc kubenswrapper[5002]: I0930 12:35:57.008354 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-hrs89" event={"ID":"8a1d9b02-1faf-4a01-82a1-d71e4c154f57","Type":"ContainerStarted","Data":"c276c7e0ce18f7db517251260e79c9e334fa039dd2d3947a72c22ff2aea9f99a"} Sep 30 12:35:57 crc kubenswrapper[5002]: I0930 12:35:57.010875 5002 generic.go:334] "Generic (PLEG): container finished" podID="6bc708e3-0747-40b1-a31e-33e23efd53cd" containerID="0a2e3563a6f51d6fc971cee3bbf4a53e725a1c9b662e1c1557fc4ced36606150" exitCode=0 Sep 30 12:35:57 crc kubenswrapper[5002]: I0930 12:35:57.010950 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-6pbv2" event={"ID":"6bc708e3-0747-40b1-a31e-33e23efd53cd","Type":"ContainerDied","Data":"0a2e3563a6f51d6fc971cee3bbf4a53e725a1c9b662e1c1557fc4ced36606150"} Sep 30 12:35:57 crc kubenswrapper[5002]: I0930 12:35:57.010980 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-6pbv2" event={"ID":"6bc708e3-0747-40b1-a31e-33e23efd53cd","Type":"ContainerStarted","Data":"e15d6a01a84cc6408ea66d7cd26eeceab24ea2ba9d6b5d69383437aaf8e98f6e"} Sep 30 12:35:57 crc kubenswrapper[5002]: I0930 12:35:57.028651 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-ring-rebalance-hrs89" podStartSLOduration=2.852062516 podStartE2EDuration="6.028631897s" podCreationTimestamp="2025-09-30 12:35:51 +0000 UTC" firstStartedPulling="2025-09-30 12:35:52.629544095 +0000 UTC m=+926.879226251" lastFinishedPulling="2025-09-30 12:35:55.806113476 +0000 UTC m=+930.055795632" observedRunningTime="2025-09-30 12:35:57.023357602 +0000 UTC m=+931.273039768" watchObservedRunningTime="2025-09-30 12:35:57.028631897 +0000 UTC m=+931.278314053" Sep 30 12:35:58 crc kubenswrapper[5002]: I0930 12:35:58.066086 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-create-gg8d9"] Sep 30 12:35:58 crc kubenswrapper[5002]: I0930 12:35:58.067449 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-create-gg8d9"
Sep 30 12:35:58 crc kubenswrapper[5002]: I0930 12:35:58.075441 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-gg8d9"]
Sep 30 12:35:58 crc kubenswrapper[5002]: I0930 12:35:58.146010 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ms7tv\" (UniqueName: \"kubernetes.io/projected/a0d9004a-7962-4f71-94bc-703b87eef783-kube-api-access-ms7tv\") pod \"keystone-db-create-gg8d9\" (UID: \"a0d9004a-7962-4f71-94bc-703b87eef783\") " pod="openstack/keystone-db-create-gg8d9"
Sep 30 12:35:58 crc kubenswrapper[5002]: I0930 12:35:58.265313 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ms7tv\" (UniqueName: \"kubernetes.io/projected/a0d9004a-7962-4f71-94bc-703b87eef783-kube-api-access-ms7tv\") pod \"keystone-db-create-gg8d9\" (UID: \"a0d9004a-7962-4f71-94bc-703b87eef783\") " pod="openstack/keystone-db-create-gg8d9"
Sep 30 12:35:58 crc kubenswrapper[5002]: I0930 12:35:58.291411 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ms7tv\" (UniqueName: \"kubernetes.io/projected/a0d9004a-7962-4f71-94bc-703b87eef783-kube-api-access-ms7tv\") pod \"keystone-db-create-gg8d9\" (UID: \"a0d9004a-7962-4f71-94bc-703b87eef783\") " pod="openstack/keystone-db-create-gg8d9"
Sep 30 12:35:58 crc kubenswrapper[5002]: I0930 12:35:58.382122 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-6pbv2"
Sep 30 12:35:58 crc kubenswrapper[5002]: I0930 12:35:58.405140 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-gg8d9"
Sep 30 12:35:58 crc kubenswrapper[5002]: I0930 12:35:58.420553 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-create-jtdpj"]
Sep 30 12:35:58 crc kubenswrapper[5002]: E0930 12:35:58.421026 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6bc708e3-0747-40b1-a31e-33e23efd53cd" containerName="mariadb-database-create"
Sep 30 12:35:58 crc kubenswrapper[5002]: I0930 12:35:58.421046 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="6bc708e3-0747-40b1-a31e-33e23efd53cd" containerName="mariadb-database-create"
Sep 30 12:35:58 crc kubenswrapper[5002]: I0930 12:35:58.421261 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="6bc708e3-0747-40b1-a31e-33e23efd53cd" containerName="mariadb-database-create"
Sep 30 12:35:58 crc kubenswrapper[5002]: I0930 12:35:58.422903 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-jtdpj"
Sep 30 12:35:58 crc kubenswrapper[5002]: I0930 12:35:58.431595 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-jtdpj"]
Sep 30 12:35:58 crc kubenswrapper[5002]: I0930 12:35:58.468599 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ndfhb\" (UniqueName: \"kubernetes.io/projected/6bc708e3-0747-40b1-a31e-33e23efd53cd-kube-api-access-ndfhb\") pod \"6bc708e3-0747-40b1-a31e-33e23efd53cd\" (UID: \"6bc708e3-0747-40b1-a31e-33e23efd53cd\") "
Sep 30 12:35:58 crc kubenswrapper[5002]: I0930 12:35:58.476043 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6bc708e3-0747-40b1-a31e-33e23efd53cd-kube-api-access-ndfhb" (OuterVolumeSpecName: "kube-api-access-ndfhb") pod "6bc708e3-0747-40b1-a31e-33e23efd53cd" (UID: "6bc708e3-0747-40b1-a31e-33e23efd53cd"). InnerVolumeSpecName "kube-api-access-ndfhb". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 12:35:58 crc kubenswrapper[5002]: I0930 12:35:58.570597 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-27fv4\" (UniqueName: \"kubernetes.io/projected/bed43e1e-c233-45a1-acf8-6f590063d0c0-kube-api-access-27fv4\") pod \"placement-db-create-jtdpj\" (UID: \"bed43e1e-c233-45a1-acf8-6f590063d0c0\") " pod="openstack/placement-db-create-jtdpj"
Sep 30 12:35:58 crc kubenswrapper[5002]: I0930 12:35:58.571078 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ndfhb\" (UniqueName: \"kubernetes.io/projected/6bc708e3-0747-40b1-a31e-33e23efd53cd-kube-api-access-ndfhb\") on node \"crc\" DevicePath \"\""
Sep 30 12:35:58 crc kubenswrapper[5002]: I0930 12:35:58.673159 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-27fv4\" (UniqueName: \"kubernetes.io/projected/bed43e1e-c233-45a1-acf8-6f590063d0c0-kube-api-access-27fv4\") pod \"placement-db-create-jtdpj\" (UID: \"bed43e1e-c233-45a1-acf8-6f590063d0c0\") " pod="openstack/placement-db-create-jtdpj"
Sep 30 12:35:58 crc kubenswrapper[5002]: I0930 12:35:58.731460 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-27fv4\" (UniqueName: \"kubernetes.io/projected/bed43e1e-c233-45a1-acf8-6f590063d0c0-kube-api-access-27fv4\") pod \"placement-db-create-jtdpj\" (UID: \"bed43e1e-c233-45a1-acf8-6f590063d0c0\") " pod="openstack/placement-db-create-jtdpj"
Sep 30 12:35:58 crc kubenswrapper[5002]: I0930 12:35:58.816974 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-jtdpj"
Sep 30 12:35:58 crc kubenswrapper[5002]: I0930 12:35:58.843504 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-gg8d9"]
Sep 30 12:35:58 crc kubenswrapper[5002]: W0930 12:35:58.851930 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda0d9004a_7962_4f71_94bc_703b87eef783.slice/crio-b4c281ef53af691c75fbb6592fe153e7f0e390d18a6e258c86d8ca2431125fab WatchSource:0}: Error finding container b4c281ef53af691c75fbb6592fe153e7f0e390d18a6e258c86d8ca2431125fab: Status 404 returned error can't find the container with id b4c281ef53af691c75fbb6592fe153e7f0e390d18a6e258c86d8ca2431125fab
Sep 30 12:35:59 crc kubenswrapper[5002]: I0930 12:35:59.031127 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-gg8d9" event={"ID":"a0d9004a-7962-4f71-94bc-703b87eef783","Type":"ContainerStarted","Data":"b4c281ef53af691c75fbb6592fe153e7f0e390d18a6e258c86d8ca2431125fab"}
Sep 30 12:35:59 crc kubenswrapper[5002]: I0930 12:35:59.032839 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-6pbv2" event={"ID":"6bc708e3-0747-40b1-a31e-33e23efd53cd","Type":"ContainerDied","Data":"e15d6a01a84cc6408ea66d7cd26eeceab24ea2ba9d6b5d69383437aaf8e98f6e"}
Sep 30 12:35:59 crc kubenswrapper[5002]: I0930 12:35:59.032887 5002 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e15d6a01a84cc6408ea66d7cd26eeceab24ea2ba9d6b5d69383437aaf8e98f6e"
Sep 30 12:35:59 crc kubenswrapper[5002]: I0930 12:35:59.032897 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-6pbv2"
Sep 30 12:35:59 crc kubenswrapper[5002]: I0930 12:35:59.299911 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-jtdpj"]
Sep 30 12:35:59 crc kubenswrapper[5002]: W0930 12:35:59.316303 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podbed43e1e_c233_45a1_acf8_6f590063d0c0.slice/crio-5f8ba1d91a44d15f5197ab11603430f52953371b15c2ba5d84f615ad9d3e9fb0 WatchSource:0}: Error finding container 5f8ba1d91a44d15f5197ab11603430f52953371b15c2ba5d84f615ad9d3e9fb0: Status 404 returned error can't find the container with id 5f8ba1d91a44d15f5197ab11603430f52953371b15c2ba5d84f615ad9d3e9fb0
Sep 30 12:35:59 crc kubenswrapper[5002]: I0930 12:35:59.382206 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/b7a32bf0-614c-479f-986e-3f954c27ad1f-etc-swift\") pod \"swift-storage-0\" (UID: \"b7a32bf0-614c-479f-986e-3f954c27ad1f\") " pod="openstack/swift-storage-0"
Sep 30 12:35:59 crc kubenswrapper[5002]: E0930 12:35:59.382449 5002 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found
Sep 30 12:35:59 crc kubenswrapper[5002]: E0930 12:35:59.382551 5002 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found
Sep 30 12:35:59 crc kubenswrapper[5002]: E0930 12:35:59.382612 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/b7a32bf0-614c-479f-986e-3f954c27ad1f-etc-swift podName:b7a32bf0-614c-479f-986e-3f954c27ad1f nodeName:}" failed. No retries permitted until 2025-09-30 12:36:07.382588995 +0000 UTC m=+941.632271141 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/b7a32bf0-614c-479f-986e-3f954c27ad1f-etc-swift") pod "swift-storage-0" (UID: "b7a32bf0-614c-479f-986e-3f954c27ad1f") : configmap "swift-ring-files" not found
Sep 30 12:36:00 crc kubenswrapper[5002]: I0930 12:36:00.041421 5002 generic.go:334] "Generic (PLEG): container finished" podID="a0d9004a-7962-4f71-94bc-703b87eef783" containerID="137c9b6f15085a54fb7618646ccbc40198a92fa016da7d965d71e457a9755476" exitCode=0
Sep 30 12:36:00 crc kubenswrapper[5002]: I0930 12:36:00.041489 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-gg8d9" event={"ID":"a0d9004a-7962-4f71-94bc-703b87eef783","Type":"ContainerDied","Data":"137c9b6f15085a54fb7618646ccbc40198a92fa016da7d965d71e457a9755476"}
Sep 30 12:36:00 crc kubenswrapper[5002]: I0930 12:36:00.043898 5002 generic.go:334] "Generic (PLEG): container finished" podID="bed43e1e-c233-45a1-acf8-6f590063d0c0" containerID="db30d89e04cb503d687ee295ed1aeacb7254c3e37f0e6cb897eb21d57b79825c" exitCode=0
Sep 30 12:36:00 crc kubenswrapper[5002]: I0930 12:36:00.043925 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-jtdpj" event={"ID":"bed43e1e-c233-45a1-acf8-6f590063d0c0","Type":"ContainerDied","Data":"db30d89e04cb503d687ee295ed1aeacb7254c3e37f0e6cb897eb21d57b79825c"}
Sep 30 12:36:00 crc kubenswrapper[5002]: I0930 12:36:00.043941 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-jtdpj" event={"ID":"bed43e1e-c233-45a1-acf8-6f590063d0c0","Type":"ContainerStarted","Data":"5f8ba1d91a44d15f5197ab11603430f52953371b15c2ba5d84f615ad9d3e9fb0"}
Sep 30 12:36:00 crc kubenswrapper[5002]: I0930 12:36:00.722621 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-698758b865-w59f2"
Sep 30 12:36:00 crc kubenswrapper[5002]: I0930 12:36:00.797498 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-86db49b7ff-gxkxn"]
Sep 30 12:36:00 crc kubenswrapper[5002]: I0930 12:36:00.797956 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-86db49b7ff-gxkxn" podUID="05a428f5-0352-4fca-af1a-a6bcd2fb227b" containerName="dnsmasq-dns" containerID="cri-o://cbc411e792f3b04777aa53906752ba038c4b07bf2256fe5e9846b56560c7dfc1" gracePeriod=10
Sep 30 12:36:01 crc kubenswrapper[5002]: I0930 12:36:01.062973 5002 generic.go:334] "Generic (PLEG): container finished" podID="05a428f5-0352-4fca-af1a-a6bcd2fb227b" containerID="cbc411e792f3b04777aa53906752ba038c4b07bf2256fe5e9846b56560c7dfc1" exitCode=0
Sep 30 12:36:01 crc kubenswrapper[5002]: I0930 12:36:01.063210 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-86db49b7ff-gxkxn" event={"ID":"05a428f5-0352-4fca-af1a-a6bcd2fb227b","Type":"ContainerDied","Data":"cbc411e792f3b04777aa53906752ba038c4b07bf2256fe5e9846b56560c7dfc1"}
Sep 30 12:36:01 crc kubenswrapper[5002]: I0930 12:36:01.368610 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-86db49b7ff-gxkxn"
Sep 30 12:36:01 crc kubenswrapper[5002]: I0930 12:36:01.453135 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-gg8d9"
Sep 30 12:36:01 crc kubenswrapper[5002]: I0930 12:36:01.461179 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-jtdpj"
Sep 30 12:36:01 crc kubenswrapper[5002]: I0930 12:36:01.565806 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ms7tv\" (UniqueName: \"kubernetes.io/projected/a0d9004a-7962-4f71-94bc-703b87eef783-kube-api-access-ms7tv\") pod \"a0d9004a-7962-4f71-94bc-703b87eef783\" (UID: \"a0d9004a-7962-4f71-94bc-703b87eef783\") "
Sep 30 12:36:01 crc kubenswrapper[5002]: I0930 12:36:01.565899 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-27fv4\" (UniqueName: \"kubernetes.io/projected/bed43e1e-c233-45a1-acf8-6f590063d0c0-kube-api-access-27fv4\") pod \"bed43e1e-c233-45a1-acf8-6f590063d0c0\" (UID: \"bed43e1e-c233-45a1-acf8-6f590063d0c0\") "
Sep 30 12:36:01 crc kubenswrapper[5002]: I0930 12:36:01.565945 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/05a428f5-0352-4fca-af1a-a6bcd2fb227b-config\") pod \"05a428f5-0352-4fca-af1a-a6bcd2fb227b\" (UID: \"05a428f5-0352-4fca-af1a-a6bcd2fb227b\") "
Sep 30 12:36:01 crc kubenswrapper[5002]: I0930 12:36:01.566019 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/05a428f5-0352-4fca-af1a-a6bcd2fb227b-dns-svc\") pod \"05a428f5-0352-4fca-af1a-a6bcd2fb227b\" (UID: \"05a428f5-0352-4fca-af1a-a6bcd2fb227b\") "
Sep 30 12:36:01 crc kubenswrapper[5002]: I0930 12:36:01.566059 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/05a428f5-0352-4fca-af1a-a6bcd2fb227b-ovsdbserver-nb\") pod \"05a428f5-0352-4fca-af1a-a6bcd2fb227b\" (UID: \"05a428f5-0352-4fca-af1a-a6bcd2fb227b\") "
Sep 30 12:36:01 crc kubenswrapper[5002]: I0930 12:36:01.566083 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ff4ng\" (UniqueName: \"kubernetes.io/projected/05a428f5-0352-4fca-af1a-a6bcd2fb227b-kube-api-access-ff4ng\") pod \"05a428f5-0352-4fca-af1a-a6bcd2fb227b\" (UID: \"05a428f5-0352-4fca-af1a-a6bcd2fb227b\") "
Sep 30 12:36:01 crc kubenswrapper[5002]: I0930 12:36:01.566117 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/05a428f5-0352-4fca-af1a-a6bcd2fb227b-ovsdbserver-sb\") pod \"05a428f5-0352-4fca-af1a-a6bcd2fb227b\" (UID: \"05a428f5-0352-4fca-af1a-a6bcd2fb227b\") "
Sep 30 12:36:01 crc kubenswrapper[5002]: I0930 12:36:01.571312 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bed43e1e-c233-45a1-acf8-6f590063d0c0-kube-api-access-27fv4" (OuterVolumeSpecName: "kube-api-access-27fv4") pod "bed43e1e-c233-45a1-acf8-6f590063d0c0" (UID: "bed43e1e-c233-45a1-acf8-6f590063d0c0"). InnerVolumeSpecName "kube-api-access-27fv4". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 12:36:01 crc kubenswrapper[5002]: I0930 12:36:01.571861 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/05a428f5-0352-4fca-af1a-a6bcd2fb227b-kube-api-access-ff4ng" (OuterVolumeSpecName: "kube-api-access-ff4ng") pod "05a428f5-0352-4fca-af1a-a6bcd2fb227b" (UID: "05a428f5-0352-4fca-af1a-a6bcd2fb227b"). InnerVolumeSpecName "kube-api-access-ff4ng". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 12:36:01 crc kubenswrapper[5002]: I0930 12:36:01.573423 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a0d9004a-7962-4f71-94bc-703b87eef783-kube-api-access-ms7tv" (OuterVolumeSpecName: "kube-api-access-ms7tv") pod "a0d9004a-7962-4f71-94bc-703b87eef783" (UID: "a0d9004a-7962-4f71-94bc-703b87eef783"). InnerVolumeSpecName "kube-api-access-ms7tv". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 12:36:01 crc kubenswrapper[5002]: I0930 12:36:01.612197 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/05a428f5-0352-4fca-af1a-a6bcd2fb227b-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "05a428f5-0352-4fca-af1a-a6bcd2fb227b" (UID: "05a428f5-0352-4fca-af1a-a6bcd2fb227b"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 30 12:36:01 crc kubenswrapper[5002]: I0930 12:36:01.613850 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/05a428f5-0352-4fca-af1a-a6bcd2fb227b-config" (OuterVolumeSpecName: "config") pod "05a428f5-0352-4fca-af1a-a6bcd2fb227b" (UID: "05a428f5-0352-4fca-af1a-a6bcd2fb227b"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 30 12:36:01 crc kubenswrapper[5002]: I0930 12:36:01.615938 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/05a428f5-0352-4fca-af1a-a6bcd2fb227b-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "05a428f5-0352-4fca-af1a-a6bcd2fb227b" (UID: "05a428f5-0352-4fca-af1a-a6bcd2fb227b"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 30 12:36:01 crc kubenswrapper[5002]: I0930 12:36:01.625553 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/05a428f5-0352-4fca-af1a-a6bcd2fb227b-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "05a428f5-0352-4fca-af1a-a6bcd2fb227b" (UID: "05a428f5-0352-4fca-af1a-a6bcd2fb227b"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 12:36:01 crc kubenswrapper[5002]: I0930 12:36:01.668792 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ms7tv\" (UniqueName: \"kubernetes.io/projected/a0d9004a-7962-4f71-94bc-703b87eef783-kube-api-access-ms7tv\") on node \"crc\" DevicePath \"\"" Sep 30 12:36:01 crc kubenswrapper[5002]: I0930 12:36:01.668836 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-27fv4\" (UniqueName: \"kubernetes.io/projected/bed43e1e-c233-45a1-acf8-6f590063d0c0-kube-api-access-27fv4\") on node \"crc\" DevicePath \"\"" Sep 30 12:36:01 crc kubenswrapper[5002]: I0930 12:36:01.668848 5002 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/05a428f5-0352-4fca-af1a-a6bcd2fb227b-config\") on node \"crc\" DevicePath \"\"" Sep 30 12:36:01 crc kubenswrapper[5002]: I0930 12:36:01.668859 5002 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/05a428f5-0352-4fca-af1a-a6bcd2fb227b-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 30 12:36:01 crc kubenswrapper[5002]: I0930 12:36:01.668872 5002 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/05a428f5-0352-4fca-af1a-a6bcd2fb227b-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Sep 30 12:36:01 crc kubenswrapper[5002]: I0930 12:36:01.668885 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ff4ng\" (UniqueName: \"kubernetes.io/projected/05a428f5-0352-4fca-af1a-a6bcd2fb227b-kube-api-access-ff4ng\") on node \"crc\" DevicePath \"\"" Sep 30 12:36:01 crc kubenswrapper[5002]: I0930 12:36:01.668897 5002 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/05a428f5-0352-4fca-af1a-a6bcd2fb227b-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Sep 30 12:36:02 crc kubenswrapper[5002]: I0930 12:36:02.074771 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-86db49b7ff-gxkxn" event={"ID":"05a428f5-0352-4fca-af1a-a6bcd2fb227b","Type":"ContainerDied","Data":"2023c13c6506b06bda09964c00b9b98cd4f34dffc44b54c472db40976be71b60"} Sep 30 12:36:02 crc kubenswrapper[5002]: I0930 12:36:02.074790 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-86db49b7ff-gxkxn" Sep 30 12:36:02 crc kubenswrapper[5002]: I0930 12:36:02.074837 5002 scope.go:117] "RemoveContainer" containerID="cbc411e792f3b04777aa53906752ba038c4b07bf2256fe5e9846b56560c7dfc1" Sep 30 12:36:02 crc kubenswrapper[5002]: I0930 12:36:02.086170 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-jtdpj" event={"ID":"bed43e1e-c233-45a1-acf8-6f590063d0c0","Type":"ContainerDied","Data":"5f8ba1d91a44d15f5197ab11603430f52953371b15c2ba5d84f615ad9d3e9fb0"} Sep 30 12:36:02 crc kubenswrapper[5002]: I0930 12:36:02.086221 5002 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5f8ba1d91a44d15f5197ab11603430f52953371b15c2ba5d84f615ad9d3e9fb0" Sep 30 12:36:02 crc kubenswrapper[5002]: I0930 12:36:02.086312 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-create-jtdpj" Sep 30 12:36:02 crc kubenswrapper[5002]: I0930 12:36:02.098097 5002 patch_prober.go:28] interesting pod/machine-config-daemon-ncbb5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 12:36:02 crc kubenswrapper[5002]: I0930 12:36:02.098181 5002 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" podUID="341a55c6-78d3-4fa2-8f47-b56fd41fa1c1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 12:36:02 crc kubenswrapper[5002]: I0930 12:36:02.098255 5002 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" Sep 30 12:36:02 crc kubenswrapper[5002]: I0930 12:36:02.099812 5002 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"c1160f56b0e9b4bf2e03936ba3d26a4bdb21744142f5cdbd024372eb9b5cd6dd"} pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 30 12:36:02 crc kubenswrapper[5002]: I0930 12:36:02.099914 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" podUID="341a55c6-78d3-4fa2-8f47-b56fd41fa1c1" containerName="machine-config-daemon" containerID="cri-o://c1160f56b0e9b4bf2e03936ba3d26a4bdb21744142f5cdbd024372eb9b5cd6dd" gracePeriod=600 Sep 30 12:36:02 crc kubenswrapper[5002]: I0930 12:36:02.138844 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-gg8d9" event={"ID":"a0d9004a-7962-4f71-94bc-703b87eef783","Type":"ContainerDied","Data":"b4c281ef53af691c75fbb6592fe153e7f0e390d18a6e258c86d8ca2431125fab"} Sep 30 12:36:02 crc kubenswrapper[5002]: I0930 12:36:02.138903 5002 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b4c281ef53af691c75fbb6592fe153e7f0e390d18a6e258c86d8ca2431125fab" Sep 30 12:36:02 crc kubenswrapper[5002]: I0930 12:36:02.138996 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-create-gg8d9" Sep 30 12:36:02 crc kubenswrapper[5002]: I0930 12:36:02.196798 5002 scope.go:117] "RemoveContainer" containerID="2a192cd33f2b86990736c6feead1fd39975cb97e1c58aa0daa41025cc23ab0b0" Sep 30 12:36:02 crc kubenswrapper[5002]: I0930 12:36:02.203844 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-86db49b7ff-gxkxn"] Sep 30 12:36:02 crc kubenswrapper[5002]: I0930 12:36:02.219555 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-86db49b7ff-gxkxn"] Sep 30 12:36:02 crc kubenswrapper[5002]: I0930 12:36:02.687767 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="05a428f5-0352-4fca-af1a-a6bcd2fb227b" path="/var/lib/kubelet/pods/05a428f5-0352-4fca-af1a-a6bcd2fb227b/volumes" Sep 30 12:36:03 crc kubenswrapper[5002]: I0930 12:36:03.149276 5002 generic.go:334] "Generic (PLEG): container finished" podID="341a55c6-78d3-4fa2-8f47-b56fd41fa1c1" containerID="c1160f56b0e9b4bf2e03936ba3d26a4bdb21744142f5cdbd024372eb9b5cd6dd" exitCode=0 Sep 30 12:36:03 crc kubenswrapper[5002]: I0930 12:36:03.149324 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" event={"ID":"341a55c6-78d3-4fa2-8f47-b56fd41fa1c1","Type":"ContainerDied","Data":"c1160f56b0e9b4bf2e03936ba3d26a4bdb21744142f5cdbd024372eb9b5cd6dd"} Sep 30 12:36:03 crc kubenswrapper[5002]: I0930 12:36:03.149764 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" event={"ID":"341a55c6-78d3-4fa2-8f47-b56fd41fa1c1","Type":"ContainerStarted","Data":"9886eed05d26a67a26421b788732fc6013ba20fcc19e4bc6732960e19d9a03e8"} Sep 30 12:36:03 crc kubenswrapper[5002]: I0930 12:36:03.149807 5002 scope.go:117] "RemoveContainer" containerID="192b85eb900ab298ef92f87ae6539f9cf20c972d71035d71486bd4ab14bc6108" Sep 30 12:36:03 crc kubenswrapper[5002]: I0930 12:36:03.153065 5002 generic.go:334] "Generic (PLEG): container finished" podID="8a1d9b02-1faf-4a01-82a1-d71e4c154f57" containerID="c276c7e0ce18f7db517251260e79c9e334fa039dd2d3947a72c22ff2aea9f99a" exitCode=0 Sep 30 12:36:03 crc kubenswrapper[5002]: I0930 12:36:03.153212 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-hrs89" event={"ID":"8a1d9b02-1faf-4a01-82a1-d71e4c154f57","Type":"ContainerDied","Data":"c276c7e0ce18f7db517251260e79c9e334fa039dd2d3947a72c22ff2aea9f99a"} Sep 30 12:36:04 crc kubenswrapper[5002]: I0930 12:36:04.526982 5002 util.go:48] "No ready sandbox for pod can be found. 
Sep 30 12:36:04 crc kubenswrapper[5002]: I0930 12:36:04.626105 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/8a1d9b02-1faf-4a01-82a1-d71e4c154f57-etc-swift\") pod \"8a1d9b02-1faf-4a01-82a1-d71e4c154f57\" (UID: \"8a1d9b02-1faf-4a01-82a1-d71e4c154f57\") "
Sep 30 12:36:04 crc kubenswrapper[5002]: I0930 12:36:04.626159 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/8a1d9b02-1faf-4a01-82a1-d71e4c154f57-ring-data-devices\") pod \"8a1d9b02-1faf-4a01-82a1-d71e4c154f57\" (UID: \"8a1d9b02-1faf-4a01-82a1-d71e4c154f57\") "
Sep 30 12:36:04 crc kubenswrapper[5002]: I0930 12:36:04.626210 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mg2qt\" (UniqueName: \"kubernetes.io/projected/8a1d9b02-1faf-4a01-82a1-d71e4c154f57-kube-api-access-mg2qt\") pod \"8a1d9b02-1faf-4a01-82a1-d71e4c154f57\" (UID: \"8a1d9b02-1faf-4a01-82a1-d71e4c154f57\") "
Sep 30 12:36:04 crc kubenswrapper[5002]: I0930 12:36:04.626248 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/8a1d9b02-1faf-4a01-82a1-d71e4c154f57-swiftconf\") pod \"8a1d9b02-1faf-4a01-82a1-d71e4c154f57\" (UID: \"8a1d9b02-1faf-4a01-82a1-d71e4c154f57\") "
Sep 30 12:36:04 crc kubenswrapper[5002]: I0930 12:36:04.626280 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/8a1d9b02-1faf-4a01-82a1-d71e4c154f57-dispersionconf\") pod \"8a1d9b02-1faf-4a01-82a1-d71e4c154f57\" (UID: \"8a1d9b02-1faf-4a01-82a1-d71e4c154f57\") "
Sep 30 12:36:04 crc kubenswrapper[5002]: I0930 12:36:04.626321 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/8a1d9b02-1faf-4a01-82a1-d71e4c154f57-scripts\") pod \"8a1d9b02-1faf-4a01-82a1-d71e4c154f57\" (UID: \"8a1d9b02-1faf-4a01-82a1-d71e4c154f57\") "
Sep 30 12:36:04 crc kubenswrapper[5002]: I0930 12:36:04.626382 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8a1d9b02-1faf-4a01-82a1-d71e4c154f57-combined-ca-bundle\") pod \"8a1d9b02-1faf-4a01-82a1-d71e4c154f57\" (UID: \"8a1d9b02-1faf-4a01-82a1-d71e4c154f57\") "
Sep 30 12:36:04 crc kubenswrapper[5002]: I0930 12:36:04.627059 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8a1d9b02-1faf-4a01-82a1-d71e4c154f57-ring-data-devices" (OuterVolumeSpecName: "ring-data-devices") pod "8a1d9b02-1faf-4a01-82a1-d71e4c154f57" (UID: "8a1d9b02-1faf-4a01-82a1-d71e4c154f57"). InnerVolumeSpecName "ring-data-devices". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 30 12:36:04 crc kubenswrapper[5002]: I0930 12:36:04.627741 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8a1d9b02-1faf-4a01-82a1-d71e4c154f57-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "8a1d9b02-1faf-4a01-82a1-d71e4c154f57" (UID: "8a1d9b02-1faf-4a01-82a1-d71e4c154f57"). InnerVolumeSpecName "etc-swift". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 30 12:36:04 crc kubenswrapper[5002]: I0930 12:36:04.632813 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8a1d9b02-1faf-4a01-82a1-d71e4c154f57-kube-api-access-mg2qt" (OuterVolumeSpecName: "kube-api-access-mg2qt") pod "8a1d9b02-1faf-4a01-82a1-d71e4c154f57" (UID: "8a1d9b02-1faf-4a01-82a1-d71e4c154f57"). InnerVolumeSpecName "kube-api-access-mg2qt". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 12:36:04 crc kubenswrapper[5002]: I0930 12:36:04.638963 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8a1d9b02-1faf-4a01-82a1-d71e4c154f57-dispersionconf" (OuterVolumeSpecName: "dispersionconf") pod "8a1d9b02-1faf-4a01-82a1-d71e4c154f57" (UID: "8a1d9b02-1faf-4a01-82a1-d71e4c154f57"). InnerVolumeSpecName "dispersionconf". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 12:36:04 crc kubenswrapper[5002]: I0930 12:36:04.649296 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8a1d9b02-1faf-4a01-82a1-d71e4c154f57-scripts" (OuterVolumeSpecName: "scripts") pod "8a1d9b02-1faf-4a01-82a1-d71e4c154f57" (UID: "8a1d9b02-1faf-4a01-82a1-d71e4c154f57"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 30 12:36:04 crc kubenswrapper[5002]: I0930 12:36:04.655729 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8a1d9b02-1faf-4a01-82a1-d71e4c154f57-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "8a1d9b02-1faf-4a01-82a1-d71e4c154f57" (UID: "8a1d9b02-1faf-4a01-82a1-d71e4c154f57"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 12:36:04 crc kubenswrapper[5002]: I0930 12:36:04.656941 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8a1d9b02-1faf-4a01-82a1-d71e4c154f57-swiftconf" (OuterVolumeSpecName: "swiftconf") pod "8a1d9b02-1faf-4a01-82a1-d71e4c154f57" (UID: "8a1d9b02-1faf-4a01-82a1-d71e4c154f57"). InnerVolumeSpecName "swiftconf". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 12:36:04 crc kubenswrapper[5002]: I0930 12:36:04.660992 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-northd-0"
Sep 30 12:36:04 crc kubenswrapper[5002]: I0930 12:36:04.728141 5002 reconciler_common.go:293] "Volume detached for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/8a1d9b02-1faf-4a01-82a1-d71e4c154f57-dispersionconf\") on node \"crc\" DevicePath \"\""
Sep 30 12:36:04 crc kubenswrapper[5002]: I0930 12:36:04.728197 5002 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/8a1d9b02-1faf-4a01-82a1-d71e4c154f57-scripts\") on node \"crc\" DevicePath \"\""
Sep 30 12:36:04 crc kubenswrapper[5002]: I0930 12:36:04.728206 5002 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8a1d9b02-1faf-4a01-82a1-d71e4c154f57-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Sep 30 12:36:04 crc kubenswrapper[5002]: I0930 12:36:04.728217 5002 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/8a1d9b02-1faf-4a01-82a1-d71e4c154f57-etc-swift\") on node \"crc\" DevicePath \"\""
Sep 30 12:36:04 crc kubenswrapper[5002]: I0930 12:36:04.728224 5002 reconciler_common.go:293] "Volume detached for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/8a1d9b02-1faf-4a01-82a1-d71e4c154f57-ring-data-devices\") on node \"crc\" DevicePath \"\""
Sep 30 12:36:04 crc kubenswrapper[5002]: I0930 12:36:04.728233 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mg2qt\" (UniqueName: \"kubernetes.io/projected/8a1d9b02-1faf-4a01-82a1-d71e4c154f57-kube-api-access-mg2qt\") on node \"crc\" DevicePath \"\""
Sep 30 12:36:04 crc kubenswrapper[5002]: I0930 12:36:04.728243 5002 reconciler_common.go:293] "Volume detached for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/8a1d9b02-1faf-4a01-82a1-d71e4c154f57-swiftconf\") on node \"crc\" DevicePath \"\""
Sep 30 12:36:05 crc kubenswrapper[5002]: I0930 12:36:05.182625 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-hrs89" event={"ID":"8a1d9b02-1faf-4a01-82a1-d71e4c154f57","Type":"ContainerDied","Data":"116eaa7a3cbdc31624aee7f536632dcfecf9c08b8af3ae69f53be3b12406f3c2"}
Sep 30 12:36:05 crc kubenswrapper[5002]: I0930 12:36:05.182875 5002 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="116eaa7a3cbdc31624aee7f536632dcfecf9c08b8af3ae69f53be3b12406f3c2"
Sep 30 12:36:05 crc kubenswrapper[5002]: I0930 12:36:05.182714 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-hrs89"
Sep 30 12:36:07 crc kubenswrapper[5002]: I0930 12:36:07.199232 5002 generic.go:334] "Generic (PLEG): container finished" podID="26e7b906-c14a-4084-926c-2d2c7ce201be" containerID="572a006f4b69c44220f3b6f964c85c72e58db661b0e2164d4eeeae9deaff60cf" exitCode=0
Sep 30 12:36:07 crc kubenswrapper[5002]: I0930 12:36:07.199289 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"26e7b906-c14a-4084-926c-2d2c7ce201be","Type":"ContainerDied","Data":"572a006f4b69c44220f3b6f964c85c72e58db661b0e2164d4eeeae9deaff60cf"}
Sep 30 12:36:07 crc kubenswrapper[5002]: I0930 12:36:07.472882 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/b7a32bf0-614c-479f-986e-3f954c27ad1f-etc-swift\") pod \"swift-storage-0\" (UID: \"b7a32bf0-614c-479f-986e-3f954c27ad1f\") " pod="openstack/swift-storage-0"
Sep 30 12:36:07 crc kubenswrapper[5002]: I0930 12:36:07.482523 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/b7a32bf0-614c-479f-986e-3f954c27ad1f-etc-swift\") pod \"swift-storage-0\" (UID: \"b7a32bf0-614c-479f-986e-3f954c27ad1f\") " pod="openstack/swift-storage-0"
Sep 30 12:36:07 crc kubenswrapper[5002]: I0930 12:36:07.711758 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-storage-0"
Sep 30 12:36:08 crc kubenswrapper[5002]: I0930 12:36:08.208299 5002 generic.go:334] "Generic (PLEG): container finished" podID="f80ae063-860a-4997-9c9f-57bc3a850e37" containerID="71b9c072672276657eaef2a61497bf03d34a5781bcf2885cdb55475c104f5798" exitCode=0
Sep 30 12:36:08 crc kubenswrapper[5002]: I0930 12:36:08.208391 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"f80ae063-860a-4997-9c9f-57bc3a850e37","Type":"ContainerDied","Data":"71b9c072672276657eaef2a61497bf03d34a5781bcf2885cdb55475c104f5798"}
Sep 30 12:36:08 crc kubenswrapper[5002]: I0930 12:36:08.210739 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"26e7b906-c14a-4084-926c-2d2c7ce201be","Type":"ContainerStarted","Data":"103645d4b02ad1c1e03f2de395b434a5b876e573ea9f8681428473bc48a29e93"}
Sep 30 12:36:08 crc kubenswrapper[5002]: I0930 12:36:08.211209 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell1-server-0"
Sep 30 12:36:08 crc kubenswrapper[5002]: I0930 12:36:08.263417 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-storage-0"]
Sep 30 12:36:08 crc kubenswrapper[5002]: I0930 12:36:08.278208 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-5240-account-create-5zpq6"]
Sep 30 12:36:08 crc kubenswrapper[5002]: E0930 12:36:08.280145 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="05a428f5-0352-4fca-af1a-a6bcd2fb227b" containerName="dnsmasq-dns"
Sep 30 12:36:08 crc kubenswrapper[5002]: I0930 12:36:08.280229 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="05a428f5-0352-4fca-af1a-a6bcd2fb227b" containerName="dnsmasq-dns"
Sep 30 12:36:08 crc kubenswrapper[5002]: E0930 12:36:08.280345 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8a1d9b02-1faf-4a01-82a1-d71e4c154f57" containerName="swift-ring-rebalance"
Sep 30 12:36:08 crc kubenswrapper[5002]: I0930 12:36:08.280389 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="8a1d9b02-1faf-4a01-82a1-d71e4c154f57" containerName="swift-ring-rebalance"
Sep 30 12:36:08 crc kubenswrapper[5002]: E0930 12:36:08.280530 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a0d9004a-7962-4f71-94bc-703b87eef783" containerName="mariadb-database-create"
Sep 30 12:36:08 crc kubenswrapper[5002]: I0930 12:36:08.280662 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="a0d9004a-7962-4f71-94bc-703b87eef783" containerName="mariadb-database-create"
Sep 30 12:36:08 crc kubenswrapper[5002]: E0930 12:36:08.280788 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="05a428f5-0352-4fca-af1a-a6bcd2fb227b" containerName="init"
Sep 30 12:36:08 crc kubenswrapper[5002]: I0930 12:36:08.280797 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="05a428f5-0352-4fca-af1a-a6bcd2fb227b" containerName="init"
Sep 30 12:36:08 crc kubenswrapper[5002]: E0930 12:36:08.280812 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bed43e1e-c233-45a1-acf8-6f590063d0c0" containerName="mariadb-database-create"
Sep 30 12:36:08 crc kubenswrapper[5002]: I0930 12:36:08.280818 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="bed43e1e-c233-45a1-acf8-6f590063d0c0" containerName="mariadb-database-create"
Sep 30 12:36:08 crc kubenswrapper[5002]: I0930 12:36:08.280967 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="8a1d9b02-1faf-4a01-82a1-d71e4c154f57" containerName="swift-ring-rebalance"
Sep 30 12:36:08 crc kubenswrapper[5002]: I0930 12:36:08.280978 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="a0d9004a-7962-4f71-94bc-703b87eef783" containerName="mariadb-database-create"
Sep 30 12:36:08 crc kubenswrapper[5002]: I0930 12:36:08.280989 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="bed43e1e-c233-45a1-acf8-6f590063d0c0" containerName="mariadb-database-create"
Sep 30 12:36:08 crc kubenswrapper[5002]: I0930 12:36:08.280999 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="05a428f5-0352-4fca-af1a-a6bcd2fb227b" containerName="dnsmasq-dns"
Sep 30 12:36:08 crc kubenswrapper[5002]: I0930 12:36:08.281646 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-5240-account-create-5zpq6"
Sep 30 12:36:08 crc kubenswrapper[5002]: I0930 12:36:08.283707 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-db-secret"
Sep 30 12:36:08 crc kubenswrapper[5002]: I0930 12:36:08.284617 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-5240-account-create-5zpq6"]
Sep 30 12:36:08 crc kubenswrapper[5002]: I0930 12:36:08.289724 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-cell1-server-0" podStartSLOduration=47.050426586 podStartE2EDuration="55.289709718s" podCreationTimestamp="2025-09-30 12:35:13 +0000 UTC" firstStartedPulling="2025-09-30 12:35:26.697544958 +0000 UTC m=+900.947227104" lastFinishedPulling="2025-09-30 12:35:34.93682809 +0000 UTC m=+909.186510236" observedRunningTime="2025-09-30 12:36:08.286503059 +0000 UTC m=+942.536185215" watchObservedRunningTime="2025-09-30 12:36:08.289709718 +0000 UTC m=+942.539391864"
Sep 30 12:36:08 crc kubenswrapper[5002]: I0930 12:36:08.388826 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l2wh8\" (UniqueName: \"kubernetes.io/projected/2ad5b572-2368-4cf6-9fd7-345872b76a49-kube-api-access-l2wh8\") pod \"keystone-5240-account-create-5zpq6\" (UID: \"2ad5b572-2368-4cf6-9fd7-345872b76a49\") " pod="openstack/keystone-5240-account-create-5zpq6"
Sep 30 12:36:08 crc kubenswrapper[5002]: I0930 12:36:08.490797 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l2wh8\" (UniqueName: \"kubernetes.io/projected/2ad5b572-2368-4cf6-9fd7-345872b76a49-kube-api-access-l2wh8\") pod \"keystone-5240-account-create-5zpq6\" (UID: \"2ad5b572-2368-4cf6-9fd7-345872b76a49\") " pod="openstack/keystone-5240-account-create-5zpq6"
Sep 30 12:36:08 crc kubenswrapper[5002]: I0930 12:36:08.508230 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l2wh8\" (UniqueName: \"kubernetes.io/projected/2ad5b572-2368-4cf6-9fd7-345872b76a49-kube-api-access-l2wh8\") pod \"keystone-5240-account-create-5zpq6\" (UID: \"2ad5b572-2368-4cf6-9fd7-345872b76a49\") " pod="openstack/keystone-5240-account-create-5zpq6"
Sep 30 12:36:08 crc kubenswrapper[5002]: I0930 12:36:08.539672 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-67be-account-create-cl86m"]
Sep 30 12:36:08 crc kubenswrapper[5002]: I0930 12:36:08.540824 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-67be-account-create-cl86m"
Sep 30 12:36:08 crc kubenswrapper[5002]: I0930 12:36:08.543244 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-db-secret"
Sep 30 12:36:08 crc kubenswrapper[5002]: I0930 12:36:08.555834 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-67be-account-create-cl86m"]
Sep 30 12:36:08 crc kubenswrapper[5002]: I0930 12:36:08.694281 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bmtw6\" (UniqueName: \"kubernetes.io/projected/f5ec1aa9-0c47-4a59-80c4-79e9d278943d-kube-api-access-bmtw6\") pod \"placement-67be-account-create-cl86m\" (UID: \"f5ec1aa9-0c47-4a59-80c4-79e9d278943d\") " pod="openstack/placement-67be-account-create-cl86m"
Sep 30 12:36:08 crc kubenswrapper[5002]: I0930 12:36:08.700513 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-5240-account-create-5zpq6"
Sep 30 12:36:08 crc kubenswrapper[5002]: I0930 12:36:08.795937 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bmtw6\" (UniqueName: \"kubernetes.io/projected/f5ec1aa9-0c47-4a59-80c4-79e9d278943d-kube-api-access-bmtw6\") pod \"placement-67be-account-create-cl86m\" (UID: \"f5ec1aa9-0c47-4a59-80c4-79e9d278943d\") " pod="openstack/placement-67be-account-create-cl86m"
Sep 30 12:36:08 crc kubenswrapper[5002]: I0930 12:36:08.822202 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bmtw6\" (UniqueName: \"kubernetes.io/projected/f5ec1aa9-0c47-4a59-80c4-79e9d278943d-kube-api-access-bmtw6\") pod \"placement-67be-account-create-cl86m\" (UID: \"f5ec1aa9-0c47-4a59-80c4-79e9d278943d\") " pod="openstack/placement-67be-account-create-cl86m"
Sep 30 12:36:08 crc kubenswrapper[5002]: I0930 12:36:08.864220 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-67be-account-create-cl86m"
Sep 30 12:36:08 crc kubenswrapper[5002]: I0930 12:36:08.907353 5002 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ovn-controller-qq5zn" podUID="6b699340-4bbd-4df4-951b-9404b0545d24" containerName="ovn-controller" probeResult="failure" output=<
Sep 30 12:36:08 crc kubenswrapper[5002]: ERROR - ovn-controller connection status is 'not connected', expecting 'connected' status
Sep 30 12:36:08 crc kubenswrapper[5002]: >
Sep 30 12:36:08 crc kubenswrapper[5002]: I0930 12:36:08.962631 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-5m55z"
Sep 30 12:36:09 crc kubenswrapper[5002]: I0930 12:36:09.210116 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-5240-account-create-5zpq6"]
Sep 30 12:36:09 crc kubenswrapper[5002]: W0930 12:36:09.213264 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2ad5b572_2368_4cf6_9fd7_345872b76a49.slice/crio-2002c037b5bd2a29eb2cce57c21ed975c169a6b24de6d9e18b541fab1f201261 WatchSource:0}: Error finding container 2002c037b5bd2a29eb2cce57c21ed975c169a6b24de6d9e18b541fab1f201261: Status 404 returned error can't find the container with id 2002c037b5bd2a29eb2cce57c21ed975c169a6b24de6d9e18b541fab1f201261
Sep 30 12:36:09 crc kubenswrapper[5002]: I0930 12:36:09.239902 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"f80ae063-860a-4997-9c9f-57bc3a850e37","Type":"ContainerStarted","Data":"3ddabafa91a278a090d2fcd07f8418fee0a2ef9a8b6d78e23fa0832bc69d145c"}
Sep 30 12:36:09 crc kubenswrapper[5002]: I0930 12:36:09.240077 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-0"
Sep 30 12:36:09 crc kubenswrapper[5002]: I0930 12:36:09.243127 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"b7a32bf0-614c-479f-986e-3f954c27ad1f","Type":"ContainerStarted","Data":"821f7bd11f162ba58bf17078d926a37180c9b8547562b39fcce8a78b9cf3b666"}
Sep 30 12:36:09 crc kubenswrapper[5002]: I0930 12:36:09.265446 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-server-0" podStartSLOduration=47.80955676 podStartE2EDuration="56.265428191s" podCreationTimestamp="2025-09-30 12:35:13 +0000 UTC" firstStartedPulling="2025-09-30 12:35:27.211733466 +0000 UTC m=+901.461415612" lastFinishedPulling="2025-09-30 12:35:35.667604897 +0000 UTC m=+909.917287043" observedRunningTime="2025-09-30 12:36:09.265156354 +0000 UTC m=+943.514838520" watchObservedRunningTime="2025-09-30 12:36:09.265428191 +0000 UTC m=+943.515110337"
Sep 30 12:36:09 crc kubenswrapper[5002]: I0930 12:36:09.315980 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-67be-account-create-cl86m"]
Sep 30 12:36:09 crc kubenswrapper[5002]: W0930 12:36:09.330651 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf5ec1aa9_0c47_4a59_80c4_79e9d278943d.slice/crio-224f5272079d4d5d39b3278f19ffd6033028b984cad39dac7e4d59e198cf6c29 WatchSource:0}: Error finding container 224f5272079d4d5d39b3278f19ffd6033028b984cad39dac7e4d59e198cf6c29: Status 404 returned error can't find the container with id 224f5272079d4d5d39b3278f19ffd6033028b984cad39dac7e4d59e198cf6c29
Sep 30 12:36:10 crc kubenswrapper[5002]: I0930 12:36:10.252660 5002 generic.go:334] "Generic (PLEG): container finished" podID="2ad5b572-2368-4cf6-9fd7-345872b76a49" containerID="139979365d4cc97837ff1cac7738f921613c6b24c1f221a8ba1f145ef3dc3c16" exitCode=0
Sep 30 12:36:10 crc kubenswrapper[5002]: I0930 12:36:10.252728 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-5240-account-create-5zpq6" event={"ID":"2ad5b572-2368-4cf6-9fd7-345872b76a49","Type":"ContainerDied","Data":"139979365d4cc97837ff1cac7738f921613c6b24c1f221a8ba1f145ef3dc3c16"}
Sep 30 12:36:10 crc kubenswrapper[5002]: I0930 12:36:10.252942 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-5240-account-create-5zpq6" event={"ID":"2ad5b572-2368-4cf6-9fd7-345872b76a49","Type":"ContainerStarted","Data":"2002c037b5bd2a29eb2cce57c21ed975c169a6b24de6d9e18b541fab1f201261"}
Sep 30 12:36:10 crc kubenswrapper[5002]: I0930 12:36:10.254627 5002 generic.go:334] "Generic (PLEG): container finished" podID="f5ec1aa9-0c47-4a59-80c4-79e9d278943d" containerID="461ba45de84cc66f64757100ba8e07901c75bfa41cac76eefdcc7d5b972e721b" exitCode=0
Sep 30 12:36:10 crc kubenswrapper[5002]: I0930 12:36:10.254702 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-67be-account-create-cl86m" event={"ID":"f5ec1aa9-0c47-4a59-80c4-79e9d278943d","Type":"ContainerDied","Data":"461ba45de84cc66f64757100ba8e07901c75bfa41cac76eefdcc7d5b972e721b"}
Sep 30 12:36:10 crc kubenswrapper[5002]: I0930 12:36:10.254732 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-67be-account-create-cl86m" event={"ID":"f5ec1aa9-0c47-4a59-80c4-79e9d278943d","Type":"ContainerStarted","Data":"224f5272079d4d5d39b3278f19ffd6033028b984cad39dac7e4d59e198cf6c29"}
Sep 30 12:36:10 crc kubenswrapper[5002]: I0930 12:36:10.256353 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"b7a32bf0-614c-479f-986e-3f954c27ad1f","Type":"ContainerStarted","Data":"ac2b82545d29873c2ddf34a4fe3593031132b164821d64107dfdb172d4f8d935"}
Sep 30 12:36:11 crc kubenswrapper[5002]: I0930 12:36:11.267211 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"b7a32bf0-614c-479f-986e-3f954c27ad1f","Type":"ContainerStarted","Data":"30bb5af734ab6666a71a48846be8aaa52d9f55358efe2c69a113b38367381edc"}
Sep 30 12:36:11 crc kubenswrapper[5002]: I0930 12:36:11.267278 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"b7a32bf0-614c-479f-986e-3f954c27ad1f","Type":"ContainerStarted","Data":"d6c5762af0439aa7ea410fae84544cc1fe9481b40cf5d7d91495fb06c8f09a78"}
Sep 30 12:36:11 crc kubenswrapper[5002]: I0930 12:36:11.267297 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"b7a32bf0-614c-479f-986e-3f954c27ad1f","Type":"ContainerStarted","Data":"d94b2d0c57797918299b1931c8a051f6ab9268a40f13363eea5a79073d38761d"}
Sep 30 12:36:11 crc kubenswrapper[5002]: I0930 12:36:11.769232 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-67be-account-create-cl86m"
Sep 30 12:36:11 crc kubenswrapper[5002]: I0930 12:36:11.847526 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-5240-account-create-5zpq6"
Sep 30 12:36:11 crc kubenswrapper[5002]: I0930 12:36:11.849156 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bmtw6\" (UniqueName: \"kubernetes.io/projected/f5ec1aa9-0c47-4a59-80c4-79e9d278943d-kube-api-access-bmtw6\") pod \"f5ec1aa9-0c47-4a59-80c4-79e9d278943d\" (UID: \"f5ec1aa9-0c47-4a59-80c4-79e9d278943d\") "
Sep 30 12:36:11 crc kubenswrapper[5002]: I0930 12:36:11.854354 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f5ec1aa9-0c47-4a59-80c4-79e9d278943d-kube-api-access-bmtw6" (OuterVolumeSpecName: "kube-api-access-bmtw6") pod "f5ec1aa9-0c47-4a59-80c4-79e9d278943d" (UID: "f5ec1aa9-0c47-4a59-80c4-79e9d278943d"). InnerVolumeSpecName "kube-api-access-bmtw6". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 12:36:11 crc kubenswrapper[5002]: I0930 12:36:11.950602 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l2wh8\" (UniqueName: \"kubernetes.io/projected/2ad5b572-2368-4cf6-9fd7-345872b76a49-kube-api-access-l2wh8\") pod \"2ad5b572-2368-4cf6-9fd7-345872b76a49\" (UID: \"2ad5b572-2368-4cf6-9fd7-345872b76a49\") "
Sep 30 12:36:11 crc kubenswrapper[5002]: I0930 12:36:11.950975 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bmtw6\" (UniqueName: \"kubernetes.io/projected/f5ec1aa9-0c47-4a59-80c4-79e9d278943d-kube-api-access-bmtw6\") on node \"crc\" DevicePath \"\""
Sep 30 12:36:11 crc kubenswrapper[5002]: I0930 12:36:11.955125 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2ad5b572-2368-4cf6-9fd7-345872b76a49-kube-api-access-l2wh8" (OuterVolumeSpecName: "kube-api-access-l2wh8") pod "2ad5b572-2368-4cf6-9fd7-345872b76a49" (UID: "2ad5b572-2368-4cf6-9fd7-345872b76a49"). InnerVolumeSpecName "kube-api-access-l2wh8". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 12:36:12 crc kubenswrapper[5002]: I0930 12:36:12.052985 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l2wh8\" (UniqueName: \"kubernetes.io/projected/2ad5b572-2368-4cf6-9fd7-345872b76a49-kube-api-access-l2wh8\") on node \"crc\" DevicePath \"\""
Sep 30 12:36:12 crc kubenswrapper[5002]: I0930 12:36:12.278034 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-67be-account-create-cl86m" event={"ID":"f5ec1aa9-0c47-4a59-80c4-79e9d278943d","Type":"ContainerDied","Data":"224f5272079d4d5d39b3278f19ffd6033028b984cad39dac7e4d59e198cf6c29"}
Sep 30 12:36:12 crc kubenswrapper[5002]: I0930 12:36:12.278065 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-67be-account-create-cl86m"
Sep 30 12:36:12 crc kubenswrapper[5002]: I0930 12:36:12.278079 5002 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="224f5272079d4d5d39b3278f19ffd6033028b984cad39dac7e4d59e198cf6c29"
Sep 30 12:36:12 crc kubenswrapper[5002]: I0930 12:36:12.279718 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-5240-account-create-5zpq6" event={"ID":"2ad5b572-2368-4cf6-9fd7-345872b76a49","Type":"ContainerDied","Data":"2002c037b5bd2a29eb2cce57c21ed975c169a6b24de6d9e18b541fab1f201261"}
Sep 30 12:36:12 crc kubenswrapper[5002]: I0930 12:36:12.279754 5002 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2002c037b5bd2a29eb2cce57c21ed975c169a6b24de6d9e18b541fab1f201261"
Sep 30 12:36:12 crc kubenswrapper[5002]: I0930 12:36:12.279789 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-5240-account-create-5zpq6"
Sep 30 12:36:13 crc kubenswrapper[5002]: I0930 12:36:13.296208 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"b7a32bf0-614c-479f-986e-3f954c27ad1f","Type":"ContainerStarted","Data":"addcd624de90e37ecd5e75d1fd597fc188bda669881777c807f35426b6f095b1"}
Sep 30 12:36:13 crc kubenswrapper[5002]: I0930 12:36:13.296446 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"b7a32bf0-614c-479f-986e-3f954c27ad1f","Type":"ContainerStarted","Data":"2741d96d34a67e8425e467bef41c7dabbf92e805036f32ae0351a0bf0e34be8e"}
Sep 30 12:36:13 crc kubenswrapper[5002]: I0930 12:36:13.296457 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"b7a32bf0-614c-479f-986e-3f954c27ad1f","Type":"ContainerStarted","Data":"8425ac403cf4a20a481426337c3a7d58d39938a0a2baf963fe9fa9ed956540c5"}
Sep 30 12:36:13 crc kubenswrapper[5002]: I0930 12:36:13.846061 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-38df-account-create-cr5rj"]
Sep 30 12:36:13 crc kubenswrapper[5002]: E0930 12:36:13.847317 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2ad5b572-2368-4cf6-9fd7-345872b76a49" containerName="mariadb-account-create"
Sep 30 12:36:13 crc kubenswrapper[5002]: I0930 12:36:13.847420 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="2ad5b572-2368-4cf6-9fd7-345872b76a49" containerName="mariadb-account-create"
Sep 30 12:36:13 crc kubenswrapper[5002]: E0930 12:36:13.847533 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f5ec1aa9-0c47-4a59-80c4-79e9d278943d" containerName="mariadb-account-create"
Sep 30 12:36:13 crc kubenswrapper[5002]: I0930 12:36:13.847620 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="f5ec1aa9-0c47-4a59-80c4-79e9d278943d" containerName="mariadb-account-create"
Sep 30 12:36:13 crc kubenswrapper[5002]: I0930 12:36:13.847885 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="2ad5b572-2368-4cf6-9fd7-345872b76a49" containerName="mariadb-account-create"
Sep 30 12:36:13 crc kubenswrapper[5002]: I0930 12:36:13.847990 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="f5ec1aa9-0c47-4a59-80c4-79e9d278943d" containerName="mariadb-account-create"
Sep 30 12:36:13 crc kubenswrapper[5002]: I0930 12:36:13.848729 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-38df-account-create-cr5rj"
Sep 30 12:36:13 crc kubenswrapper[5002]: I0930 12:36:13.856648 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-db-secret"
Sep 30 12:36:13 crc kubenswrapper[5002]: I0930 12:36:13.879196 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-38df-account-create-cr5rj"]
Sep 30 12:36:13 crc kubenswrapper[5002]: I0930 12:36:13.915872 5002 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ovn-controller-qq5zn" podUID="6b699340-4bbd-4df4-951b-9404b0545d24" containerName="ovn-controller" probeResult="failure" output=<
Sep 30 12:36:13 crc kubenswrapper[5002]: ERROR - ovn-controller connection status is 'not connected', expecting 'connected' status
Sep 30 12:36:13 crc kubenswrapper[5002]: >
Sep 30 12:36:13 crc kubenswrapper[5002]: I0930 12:36:13.953199 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-5m55z"
Sep 30 12:36:13 crc kubenswrapper[5002]: I0930 12:36:13.982647 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9dd4d\" (UniqueName: \"kubernetes.io/projected/199b9416-df82-435d-b362-1624462e7e79-kube-api-access-9dd4d\") pod \"glance-38df-account-create-cr5rj\" (UID: \"199b9416-df82-435d-b362-1624462e7e79\") " pod="openstack/glance-38df-account-create-cr5rj"
Sep 30 12:36:14 crc kubenswrapper[5002]: I0930 12:36:14.084317 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9dd4d\" (UniqueName: \"kubernetes.io/projected/199b9416-df82-435d-b362-1624462e7e79-kube-api-access-9dd4d\") pod \"glance-38df-account-create-cr5rj\" (UID: \"199b9416-df82-435d-b362-1624462e7e79\") " pod="openstack/glance-38df-account-create-cr5rj"
Sep 30 12:36:14 crc kubenswrapper[5002]: I0930 12:36:14.130032 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9dd4d\" (UniqueName: \"kubernetes.io/projected/199b9416-df82-435d-b362-1624462e7e79-kube-api-access-9dd4d\") pod \"glance-38df-account-create-cr5rj\" (UID: \"199b9416-df82-435d-b362-1624462e7e79\") " pod="openstack/glance-38df-account-create-cr5rj"
Sep 30 12:36:14 crc kubenswrapper[5002]: I0930 12:36:14.159751 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-qq5zn-config-vk22c"]
Sep 30 12:36:14 crc kubenswrapper[5002]: I0930 12:36:14.161195 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-qq5zn-config-vk22c"
Sep 30 12:36:14 crc kubenswrapper[5002]: I0930 12:36:14.164262 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-extra-scripts"
Sep 30 12:36:14 crc kubenswrapper[5002]: I0930 12:36:14.172122 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-qq5zn-config-vk22c"]
Sep 30 12:36:14 crc kubenswrapper[5002]: I0930 12:36:14.190836 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-38df-account-create-cr5rj"
Sep 30 12:36:14 crc kubenswrapper[5002]: I0930 12:36:14.287353 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/891d0c70-b45a-447b-b570-5630e28a6799-additional-scripts\") pod \"ovn-controller-qq5zn-config-vk22c\" (UID: \"891d0c70-b45a-447b-b570-5630e28a6799\") " pod="openstack/ovn-controller-qq5zn-config-vk22c"
Sep 30 12:36:14 crc kubenswrapper[5002]: I0930 12:36:14.287812 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tch4p\" (UniqueName: \"kubernetes.io/projected/891d0c70-b45a-447b-b570-5630e28a6799-kube-api-access-tch4p\") pod \"ovn-controller-qq5zn-config-vk22c\" (UID: \"891d0c70-b45a-447b-b570-5630e28a6799\") " pod="openstack/ovn-controller-qq5zn-config-vk22c"
Sep 30 12:36:14 crc kubenswrapper[5002]: I0930 12:36:14.287872 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/891d0c70-b45a-447b-b570-5630e28a6799-scripts\") pod \"ovn-controller-qq5zn-config-vk22c\" (UID: \"891d0c70-b45a-447b-b570-5630e28a6799\") " pod="openstack/ovn-controller-qq5zn-config-vk22c"
Sep 30 12:36:14 crc kubenswrapper[5002]: I0930 12:36:14.288014 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/891d0c70-b45a-447b-b570-5630e28a6799-var-log-ovn\") pod \"ovn-controller-qq5zn-config-vk22c\" (UID: \"891d0c70-b45a-447b-b570-5630e28a6799\") " pod="openstack/ovn-controller-qq5zn-config-vk22c"
Sep 30 12:36:14 crc kubenswrapper[5002]: I0930 12:36:14.288092 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/891d0c70-b45a-447b-b570-5630e28a6799-var-run\") pod \"ovn-controller-qq5zn-config-vk22c\" (UID: \"891d0c70-b45a-447b-b570-5630e28a6799\") " pod="openstack/ovn-controller-qq5zn-config-vk22c"
Sep 30 12:36:14 crc kubenswrapper[5002]: I0930 12:36:14.288204 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/891d0c70-b45a-447b-b570-5630e28a6799-var-run-ovn\") pod \"ovn-controller-qq5zn-config-vk22c\" (UID: \"891d0c70-b45a-447b-b570-5630e28a6799\") " pod="openstack/ovn-controller-qq5zn-config-vk22c"
Sep 30 12:36:14 crc kubenswrapper[5002]: I0930 12:36:14.306379 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"b7a32bf0-614c-479f-986e-3f954c27ad1f","Type":"ContainerStarted","Data":"bd130a40f4eea0ba3525af695216d13b587c447909c0eadc4986649ca39fdcf8"}
Sep 30 12:36:14 crc kubenswrapper[5002]: I0930 12:36:14.389432 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/891d0c70-b45a-447b-b570-5630e28a6799-var-log-ovn\") pod \"ovn-controller-qq5zn-config-vk22c\" (UID: \"891d0c70-b45a-447b-b570-5630e28a6799\") " pod="openstack/ovn-controller-qq5zn-config-vk22c"
Sep 30 12:36:14 crc kubenswrapper[5002]: I0930 12:36:14.389498 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/891d0c70-b45a-447b-b570-5630e28a6799-var-run\") pod \"ovn-controller-qq5zn-config-vk22c\" (UID: \"891d0c70-b45a-447b-b570-5630e28a6799\") " pod="openstack/ovn-controller-qq5zn-config-vk22c"
\"891d0c70-b45a-447b-b570-5630e28a6799\") " pod="openstack/ovn-controller-qq5zn-config-vk22c" Sep 30 12:36:14 crc kubenswrapper[5002]: I0930 12:36:14.389540 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/891d0c70-b45a-447b-b570-5630e28a6799-var-run-ovn\") pod \"ovn-controller-qq5zn-config-vk22c\" (UID: \"891d0c70-b45a-447b-b570-5630e28a6799\") " pod="openstack/ovn-controller-qq5zn-config-vk22c" Sep 30 12:36:14 crc kubenswrapper[5002]: I0930 12:36:14.389629 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/891d0c70-b45a-447b-b570-5630e28a6799-additional-scripts\") pod \"ovn-controller-qq5zn-config-vk22c\" (UID: \"891d0c70-b45a-447b-b570-5630e28a6799\") " pod="openstack/ovn-controller-qq5zn-config-vk22c" Sep 30 12:36:14 crc kubenswrapper[5002]: I0930 12:36:14.389651 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tch4p\" (UniqueName: \"kubernetes.io/projected/891d0c70-b45a-447b-b570-5630e28a6799-kube-api-access-tch4p\") pod \"ovn-controller-qq5zn-config-vk22c\" (UID: \"891d0c70-b45a-447b-b570-5630e28a6799\") " pod="openstack/ovn-controller-qq5zn-config-vk22c" Sep 30 12:36:14 crc kubenswrapper[5002]: I0930 12:36:14.389675 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/891d0c70-b45a-447b-b570-5630e28a6799-scripts\") pod \"ovn-controller-qq5zn-config-vk22c\" (UID: \"891d0c70-b45a-447b-b570-5630e28a6799\") " pod="openstack/ovn-controller-qq5zn-config-vk22c" Sep 30 12:36:14 crc kubenswrapper[5002]: I0930 12:36:14.392059 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/891d0c70-b45a-447b-b570-5630e28a6799-scripts\") pod \"ovn-controller-qq5zn-config-vk22c\" (UID: \"891d0c70-b45a-447b-b570-5630e28a6799\") " pod="openstack/ovn-controller-qq5zn-config-vk22c" Sep 30 12:36:14 crc kubenswrapper[5002]: I0930 12:36:14.392326 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/891d0c70-b45a-447b-b570-5630e28a6799-var-log-ovn\") pod \"ovn-controller-qq5zn-config-vk22c\" (UID: \"891d0c70-b45a-447b-b570-5630e28a6799\") " pod="openstack/ovn-controller-qq5zn-config-vk22c" Sep 30 12:36:14 crc kubenswrapper[5002]: I0930 12:36:14.392384 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/891d0c70-b45a-447b-b570-5630e28a6799-var-run\") pod \"ovn-controller-qq5zn-config-vk22c\" (UID: \"891d0c70-b45a-447b-b570-5630e28a6799\") " pod="openstack/ovn-controller-qq5zn-config-vk22c" Sep 30 12:36:14 crc kubenswrapper[5002]: I0930 12:36:14.392428 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/891d0c70-b45a-447b-b570-5630e28a6799-var-run-ovn\") pod \"ovn-controller-qq5zn-config-vk22c\" (UID: \"891d0c70-b45a-447b-b570-5630e28a6799\") " pod="openstack/ovn-controller-qq5zn-config-vk22c" Sep 30 12:36:14 crc kubenswrapper[5002]: I0930 12:36:14.392922 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/891d0c70-b45a-447b-b570-5630e28a6799-additional-scripts\") pod \"ovn-controller-qq5zn-config-vk22c\" (UID: 
\"891d0c70-b45a-447b-b570-5630e28a6799\") " pod="openstack/ovn-controller-qq5zn-config-vk22c" Sep 30 12:36:14 crc kubenswrapper[5002]: I0930 12:36:14.408506 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tch4p\" (UniqueName: \"kubernetes.io/projected/891d0c70-b45a-447b-b570-5630e28a6799-kube-api-access-tch4p\") pod \"ovn-controller-qq5zn-config-vk22c\" (UID: \"891d0c70-b45a-447b-b570-5630e28a6799\") " pod="openstack/ovn-controller-qq5zn-config-vk22c" Sep 30 12:36:14 crc kubenswrapper[5002]: I0930 12:36:14.476014 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-qq5zn-config-vk22c" Sep 30 12:36:14 crc kubenswrapper[5002]: I0930 12:36:14.935671 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-38df-account-create-cr5rj"] Sep 30 12:36:14 crc kubenswrapper[5002]: W0930 12:36:14.942398 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod199b9416_df82_435d_b362_1624462e7e79.slice/crio-1706b7af7c054373da6a608011e3b374e7b9ac89ef249a4f3b4e402f9a137dbc WatchSource:0}: Error finding container 1706b7af7c054373da6a608011e3b374e7b9ac89ef249a4f3b4e402f9a137dbc: Status 404 returned error can't find the container with id 1706b7af7c054373da6a608011e3b374e7b9ac89ef249a4f3b4e402f9a137dbc Sep 30 12:36:15 crc kubenswrapper[5002]: I0930 12:36:15.015557 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-qq5zn-config-vk22c"] Sep 30 12:36:15 crc kubenswrapper[5002]: W0930 12:36:15.022165 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod891d0c70_b45a_447b_b570_5630e28a6799.slice/crio-b8303e9990dbf469c5dc7a5f305afad4d4082b99dee28298bc35109970318fcf WatchSource:0}: Error finding container b8303e9990dbf469c5dc7a5f305afad4d4082b99dee28298bc35109970318fcf: Status 404 returned error can't find the container with id b8303e9990dbf469c5dc7a5f305afad4d4082b99dee28298bc35109970318fcf Sep 30 12:36:15 crc kubenswrapper[5002]: I0930 12:36:15.314200 5002 generic.go:334] "Generic (PLEG): container finished" podID="199b9416-df82-435d-b362-1624462e7e79" containerID="b0de66aa455981cebaaf74f42189600b23c5ea003e81de2b461ebb6c70c5896c" exitCode=0 Sep 30 12:36:15 crc kubenswrapper[5002]: I0930 12:36:15.314464 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-38df-account-create-cr5rj" event={"ID":"199b9416-df82-435d-b362-1624462e7e79","Type":"ContainerDied","Data":"b0de66aa455981cebaaf74f42189600b23c5ea003e81de2b461ebb6c70c5896c"} Sep 30 12:36:15 crc kubenswrapper[5002]: I0930 12:36:15.314667 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-38df-account-create-cr5rj" event={"ID":"199b9416-df82-435d-b362-1624462e7e79","Type":"ContainerStarted","Data":"1706b7af7c054373da6a608011e3b374e7b9ac89ef249a4f3b4e402f9a137dbc"} Sep 30 12:36:15 crc kubenswrapper[5002]: I0930 12:36:15.320629 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"b7a32bf0-614c-479f-986e-3f954c27ad1f","Type":"ContainerStarted","Data":"17c12521523c5eb4bc4b2394f1aed0b925c95e3fc0f2c993e168e01c1cc8f50e"} Sep 30 12:36:15 crc kubenswrapper[5002]: I0930 12:36:15.320677 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" 
event={"ID":"b7a32bf0-614c-479f-986e-3f954c27ad1f","Type":"ContainerStarted","Data":"32ce0378795d7d8c7dc9876861b504aee83fd4eece22beacc4a3e7ef4725c7e2"} Sep 30 12:36:15 crc kubenswrapper[5002]: I0930 12:36:15.320706 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"b7a32bf0-614c-479f-986e-3f954c27ad1f","Type":"ContainerStarted","Data":"46111da0bc95cd7490395690c72bc9332bef1b97a19e93edb146394d76314e2e"} Sep 30 12:36:15 crc kubenswrapper[5002]: I0930 12:36:15.320716 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"b7a32bf0-614c-479f-986e-3f954c27ad1f","Type":"ContainerStarted","Data":"51d70fe7ef5fe5d137ef8df3395a20b0a7e7341b23fb50c2f62565d1ec11e89e"} Sep 30 12:36:15 crc kubenswrapper[5002]: I0930 12:36:15.321898 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-qq5zn-config-vk22c" event={"ID":"891d0c70-b45a-447b-b570-5630e28a6799","Type":"ContainerStarted","Data":"b8303e9990dbf469c5dc7a5f305afad4d4082b99dee28298bc35109970318fcf"} Sep 30 12:36:16 crc kubenswrapper[5002]: I0930 12:36:16.332704 5002 generic.go:334] "Generic (PLEG): container finished" podID="891d0c70-b45a-447b-b570-5630e28a6799" containerID="52a3d506ba9667dd4ac3b0002ed67025ec53a1082262c1141d589c0c001cf343" exitCode=0 Sep 30 12:36:16 crc kubenswrapper[5002]: I0930 12:36:16.333007 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-qq5zn-config-vk22c" event={"ID":"891d0c70-b45a-447b-b570-5630e28a6799","Type":"ContainerDied","Data":"52a3d506ba9667dd4ac3b0002ed67025ec53a1082262c1141d589c0c001cf343"} Sep 30 12:36:16 crc kubenswrapper[5002]: I0930 12:36:16.340355 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"b7a32bf0-614c-479f-986e-3f954c27ad1f","Type":"ContainerStarted","Data":"6657e2312b14fb4e53381f94fae6e03ee3553286d6369a89f80e0d85a2b1e64d"} Sep 30 12:36:16 crc kubenswrapper[5002]: I0930 12:36:16.340390 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"b7a32bf0-614c-479f-986e-3f954c27ad1f","Type":"ContainerStarted","Data":"992278d59d1812d0e101638edf71f2a63fbe74c98d380539043667db1bdabac3"} Sep 30 12:36:16 crc kubenswrapper[5002]: I0930 12:36:16.340399 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"b7a32bf0-614c-479f-986e-3f954c27ad1f","Type":"ContainerStarted","Data":"b1141094534551735a520af41e34e1c1b47fc6dfe97cd5fd4402298b26d98a9a"} Sep 30 12:36:16 crc kubenswrapper[5002]: I0930 12:36:16.400292 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-storage-0" podStartSLOduration=20.229413303 podStartE2EDuration="26.400263374s" podCreationTimestamp="2025-09-30 12:35:50 +0000 UTC" firstStartedPulling="2025-09-30 12:36:08.271139059 +0000 UTC m=+942.520821205" lastFinishedPulling="2025-09-30 12:36:14.44198913 +0000 UTC m=+948.691671276" observedRunningTime="2025-09-30 12:36:16.385645424 +0000 UTC m=+950.635327591" watchObservedRunningTime="2025-09-30 12:36:16.400263374 +0000 UTC m=+950.649945540" Sep 30 12:36:16 crc kubenswrapper[5002]: I0930 12:36:16.652775 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-77585f5f8c-6kcs9"] Sep 30 12:36:16 crc kubenswrapper[5002]: I0930 12:36:16.654067 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-77585f5f8c-6kcs9" Sep 30 12:36:16 crc kubenswrapper[5002]: I0930 12:36:16.655758 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns-swift-storage-0" Sep 30 12:36:16 crc kubenswrapper[5002]: I0930 12:36:16.671141 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-77585f5f8c-6kcs9"] Sep 30 12:36:16 crc kubenswrapper[5002]: I0930 12:36:16.674564 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-38df-account-create-cr5rj" Sep 30 12:36:16 crc kubenswrapper[5002]: I0930 12:36:16.791287 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/cd808dd7-6b41-4208-8f6e-c54bea4d22c5-ovsdbserver-sb\") pod \"dnsmasq-dns-77585f5f8c-6kcs9\" (UID: \"cd808dd7-6b41-4208-8f6e-c54bea4d22c5\") " pod="openstack/dnsmasq-dns-77585f5f8c-6kcs9" Sep 30 12:36:16 crc kubenswrapper[5002]: I0930 12:36:16.791359 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/cd808dd7-6b41-4208-8f6e-c54bea4d22c5-ovsdbserver-nb\") pod \"dnsmasq-dns-77585f5f8c-6kcs9\" (UID: \"cd808dd7-6b41-4208-8f6e-c54bea4d22c5\") " pod="openstack/dnsmasq-dns-77585f5f8c-6kcs9" Sep 30 12:36:16 crc kubenswrapper[5002]: I0930 12:36:16.791383 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/cd808dd7-6b41-4208-8f6e-c54bea4d22c5-dns-swift-storage-0\") pod \"dnsmasq-dns-77585f5f8c-6kcs9\" (UID: \"cd808dd7-6b41-4208-8f6e-c54bea4d22c5\") " pod="openstack/dnsmasq-dns-77585f5f8c-6kcs9" Sep 30 12:36:16 crc kubenswrapper[5002]: I0930 12:36:16.791449 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/cd808dd7-6b41-4208-8f6e-c54bea4d22c5-dns-svc\") pod \"dnsmasq-dns-77585f5f8c-6kcs9\" (UID: \"cd808dd7-6b41-4208-8f6e-c54bea4d22c5\") " pod="openstack/dnsmasq-dns-77585f5f8c-6kcs9" Sep 30 12:36:16 crc kubenswrapper[5002]: I0930 12:36:16.791595 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cd808dd7-6b41-4208-8f6e-c54bea4d22c5-config\") pod \"dnsmasq-dns-77585f5f8c-6kcs9\" (UID: \"cd808dd7-6b41-4208-8f6e-c54bea4d22c5\") " pod="openstack/dnsmasq-dns-77585f5f8c-6kcs9" Sep 30 12:36:16 crc kubenswrapper[5002]: I0930 12:36:16.791694 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b4hps\" (UniqueName: \"kubernetes.io/projected/cd808dd7-6b41-4208-8f6e-c54bea4d22c5-kube-api-access-b4hps\") pod \"dnsmasq-dns-77585f5f8c-6kcs9\" (UID: \"cd808dd7-6b41-4208-8f6e-c54bea4d22c5\") " pod="openstack/dnsmasq-dns-77585f5f8c-6kcs9" Sep 30 12:36:16 crc kubenswrapper[5002]: I0930 12:36:16.892654 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9dd4d\" (UniqueName: \"kubernetes.io/projected/199b9416-df82-435d-b362-1624462e7e79-kube-api-access-9dd4d\") pod \"199b9416-df82-435d-b362-1624462e7e79\" (UID: \"199b9416-df82-435d-b362-1624462e7e79\") " Sep 30 12:36:16 crc kubenswrapper[5002]: I0930 12:36:16.892848 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"kube-api-access-b4hps\" (UniqueName: \"kubernetes.io/projected/cd808dd7-6b41-4208-8f6e-c54bea4d22c5-kube-api-access-b4hps\") pod \"dnsmasq-dns-77585f5f8c-6kcs9\" (UID: \"cd808dd7-6b41-4208-8f6e-c54bea4d22c5\") " pod="openstack/dnsmasq-dns-77585f5f8c-6kcs9" Sep 30 12:36:16 crc kubenswrapper[5002]: I0930 12:36:16.892949 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/cd808dd7-6b41-4208-8f6e-c54bea4d22c5-ovsdbserver-sb\") pod \"dnsmasq-dns-77585f5f8c-6kcs9\" (UID: \"cd808dd7-6b41-4208-8f6e-c54bea4d22c5\") " pod="openstack/dnsmasq-dns-77585f5f8c-6kcs9" Sep 30 12:36:16 crc kubenswrapper[5002]: I0930 12:36:16.892969 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/cd808dd7-6b41-4208-8f6e-c54bea4d22c5-ovsdbserver-nb\") pod \"dnsmasq-dns-77585f5f8c-6kcs9\" (UID: \"cd808dd7-6b41-4208-8f6e-c54bea4d22c5\") " pod="openstack/dnsmasq-dns-77585f5f8c-6kcs9" Sep 30 12:36:16 crc kubenswrapper[5002]: I0930 12:36:16.892985 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/cd808dd7-6b41-4208-8f6e-c54bea4d22c5-dns-swift-storage-0\") pod \"dnsmasq-dns-77585f5f8c-6kcs9\" (UID: \"cd808dd7-6b41-4208-8f6e-c54bea4d22c5\") " pod="openstack/dnsmasq-dns-77585f5f8c-6kcs9" Sep 30 12:36:16 crc kubenswrapper[5002]: I0930 12:36:16.893012 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/cd808dd7-6b41-4208-8f6e-c54bea4d22c5-dns-svc\") pod \"dnsmasq-dns-77585f5f8c-6kcs9\" (UID: \"cd808dd7-6b41-4208-8f6e-c54bea4d22c5\") " pod="openstack/dnsmasq-dns-77585f5f8c-6kcs9" Sep 30 12:36:16 crc kubenswrapper[5002]: I0930 12:36:16.893058 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cd808dd7-6b41-4208-8f6e-c54bea4d22c5-config\") pod \"dnsmasq-dns-77585f5f8c-6kcs9\" (UID: \"cd808dd7-6b41-4208-8f6e-c54bea4d22c5\") " pod="openstack/dnsmasq-dns-77585f5f8c-6kcs9" Sep 30 12:36:16 crc kubenswrapper[5002]: I0930 12:36:16.893936 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cd808dd7-6b41-4208-8f6e-c54bea4d22c5-config\") pod \"dnsmasq-dns-77585f5f8c-6kcs9\" (UID: \"cd808dd7-6b41-4208-8f6e-c54bea4d22c5\") " pod="openstack/dnsmasq-dns-77585f5f8c-6kcs9" Sep 30 12:36:16 crc kubenswrapper[5002]: I0930 12:36:16.894363 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/cd808dd7-6b41-4208-8f6e-c54bea4d22c5-dns-swift-storage-0\") pod \"dnsmasq-dns-77585f5f8c-6kcs9\" (UID: \"cd808dd7-6b41-4208-8f6e-c54bea4d22c5\") " pod="openstack/dnsmasq-dns-77585f5f8c-6kcs9" Sep 30 12:36:16 crc kubenswrapper[5002]: I0930 12:36:16.894366 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/cd808dd7-6b41-4208-8f6e-c54bea4d22c5-ovsdbserver-sb\") pod \"dnsmasq-dns-77585f5f8c-6kcs9\" (UID: \"cd808dd7-6b41-4208-8f6e-c54bea4d22c5\") " pod="openstack/dnsmasq-dns-77585f5f8c-6kcs9" Sep 30 12:36:16 crc kubenswrapper[5002]: I0930 12:36:16.894367 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: 
\"kubernetes.io/configmap/cd808dd7-6b41-4208-8f6e-c54bea4d22c5-dns-svc\") pod \"dnsmasq-dns-77585f5f8c-6kcs9\" (UID: \"cd808dd7-6b41-4208-8f6e-c54bea4d22c5\") " pod="openstack/dnsmasq-dns-77585f5f8c-6kcs9" Sep 30 12:36:16 crc kubenswrapper[5002]: I0930 12:36:16.895119 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/cd808dd7-6b41-4208-8f6e-c54bea4d22c5-ovsdbserver-nb\") pod \"dnsmasq-dns-77585f5f8c-6kcs9\" (UID: \"cd808dd7-6b41-4208-8f6e-c54bea4d22c5\") " pod="openstack/dnsmasq-dns-77585f5f8c-6kcs9" Sep 30 12:36:16 crc kubenswrapper[5002]: I0930 12:36:16.907299 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/199b9416-df82-435d-b362-1624462e7e79-kube-api-access-9dd4d" (OuterVolumeSpecName: "kube-api-access-9dd4d") pod "199b9416-df82-435d-b362-1624462e7e79" (UID: "199b9416-df82-435d-b362-1624462e7e79"). InnerVolumeSpecName "kube-api-access-9dd4d". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 12:36:16 crc kubenswrapper[5002]: I0930 12:36:16.911599 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b4hps\" (UniqueName: \"kubernetes.io/projected/cd808dd7-6b41-4208-8f6e-c54bea4d22c5-kube-api-access-b4hps\") pod \"dnsmasq-dns-77585f5f8c-6kcs9\" (UID: \"cd808dd7-6b41-4208-8f6e-c54bea4d22c5\") " pod="openstack/dnsmasq-dns-77585f5f8c-6kcs9" Sep 30 12:36:16 crc kubenswrapper[5002]: I0930 12:36:16.987582 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-77585f5f8c-6kcs9" Sep 30 12:36:16 crc kubenswrapper[5002]: I0930 12:36:16.995069 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9dd4d\" (UniqueName: \"kubernetes.io/projected/199b9416-df82-435d-b362-1624462e7e79-kube-api-access-9dd4d\") on node \"crc\" DevicePath \"\"" Sep 30 12:36:17 crc kubenswrapper[5002]: I0930 12:36:17.348745 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-38df-account-create-cr5rj" event={"ID":"199b9416-df82-435d-b362-1624462e7e79","Type":"ContainerDied","Data":"1706b7af7c054373da6a608011e3b374e7b9ac89ef249a4f3b4e402f9a137dbc"} Sep 30 12:36:17 crc kubenswrapper[5002]: I0930 12:36:17.348789 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-38df-account-create-cr5rj" Sep 30 12:36:17 crc kubenswrapper[5002]: I0930 12:36:17.348796 5002 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1706b7af7c054373da6a608011e3b374e7b9ac89ef249a4f3b4e402f9a137dbc" Sep 30 12:36:17 crc kubenswrapper[5002]: I0930 12:36:17.467632 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-77585f5f8c-6kcs9"] Sep 30 12:36:17 crc kubenswrapper[5002]: W0930 12:36:17.512341 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podcd808dd7_6b41_4208_8f6e_c54bea4d22c5.slice/crio-ca0313ab28b30572e70f6693636759d6299b7e4e7c54c628545681b4afe9b858 WatchSource:0}: Error finding container ca0313ab28b30572e70f6693636759d6299b7e4e7c54c628545681b4afe9b858: Status 404 returned error can't find the container with id ca0313ab28b30572e70f6693636759d6299b7e4e7c54c628545681b4afe9b858 Sep 30 12:36:17 crc kubenswrapper[5002]: I0930 12:36:17.595138 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-qq5zn-config-vk22c" Sep 30 12:36:17 crc kubenswrapper[5002]: I0930 12:36:17.714653 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/891d0c70-b45a-447b-b570-5630e28a6799-var-log-ovn\") pod \"891d0c70-b45a-447b-b570-5630e28a6799\" (UID: \"891d0c70-b45a-447b-b570-5630e28a6799\") " Sep 30 12:36:17 crc kubenswrapper[5002]: I0930 12:36:17.714743 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/891d0c70-b45a-447b-b570-5630e28a6799-var-log-ovn" (OuterVolumeSpecName: "var-log-ovn") pod "891d0c70-b45a-447b-b570-5630e28a6799" (UID: "891d0c70-b45a-447b-b570-5630e28a6799"). InnerVolumeSpecName "var-log-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 12:36:17 crc kubenswrapper[5002]: I0930 12:36:17.714787 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/891d0c70-b45a-447b-b570-5630e28a6799-additional-scripts\") pod \"891d0c70-b45a-447b-b570-5630e28a6799\" (UID: \"891d0c70-b45a-447b-b570-5630e28a6799\") " Sep 30 12:36:17 crc kubenswrapper[5002]: I0930 12:36:17.714818 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/891d0c70-b45a-447b-b570-5630e28a6799-var-run-ovn\") pod \"891d0c70-b45a-447b-b570-5630e28a6799\" (UID: \"891d0c70-b45a-447b-b570-5630e28a6799\") " Sep 30 12:36:17 crc kubenswrapper[5002]: I0930 12:36:17.714890 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/891d0c70-b45a-447b-b570-5630e28a6799-var-run\") pod \"891d0c70-b45a-447b-b570-5630e28a6799\" (UID: \"891d0c70-b45a-447b-b570-5630e28a6799\") " Sep 30 12:36:17 crc kubenswrapper[5002]: I0930 12:36:17.714919 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/891d0c70-b45a-447b-b570-5630e28a6799-scripts\") pod \"891d0c70-b45a-447b-b570-5630e28a6799\" (UID: \"891d0c70-b45a-447b-b570-5630e28a6799\") " Sep 30 12:36:17 crc kubenswrapper[5002]: I0930 12:36:17.715018 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tch4p\" (UniqueName: \"kubernetes.io/projected/891d0c70-b45a-447b-b570-5630e28a6799-kube-api-access-tch4p\") pod \"891d0c70-b45a-447b-b570-5630e28a6799\" (UID: \"891d0c70-b45a-447b-b570-5630e28a6799\") " Sep 30 12:36:17 crc kubenswrapper[5002]: I0930 12:36:17.715431 5002 reconciler_common.go:293] "Volume detached for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/891d0c70-b45a-447b-b570-5630e28a6799-var-log-ovn\") on node \"crc\" DevicePath \"\"" Sep 30 12:36:17 crc kubenswrapper[5002]: I0930 12:36:17.715850 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/891d0c70-b45a-447b-b570-5630e28a6799-var-run-ovn" (OuterVolumeSpecName: "var-run-ovn") pod "891d0c70-b45a-447b-b570-5630e28a6799" (UID: "891d0c70-b45a-447b-b570-5630e28a6799"). InnerVolumeSpecName "var-run-ovn". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 12:36:17 crc kubenswrapper[5002]: I0930 12:36:17.716303 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/891d0c70-b45a-447b-b570-5630e28a6799-additional-scripts" (OuterVolumeSpecName: "additional-scripts") pod "891d0c70-b45a-447b-b570-5630e28a6799" (UID: "891d0c70-b45a-447b-b570-5630e28a6799"). InnerVolumeSpecName "additional-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 12:36:17 crc kubenswrapper[5002]: I0930 12:36:17.716424 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/891d0c70-b45a-447b-b570-5630e28a6799-var-run" (OuterVolumeSpecName: "var-run") pod "891d0c70-b45a-447b-b570-5630e28a6799" (UID: "891d0c70-b45a-447b-b570-5630e28a6799"). InnerVolumeSpecName "var-run". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 12:36:17 crc kubenswrapper[5002]: I0930 12:36:17.717274 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/891d0c70-b45a-447b-b570-5630e28a6799-scripts" (OuterVolumeSpecName: "scripts") pod "891d0c70-b45a-447b-b570-5630e28a6799" (UID: "891d0c70-b45a-447b-b570-5630e28a6799"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 12:36:17 crc kubenswrapper[5002]: I0930 12:36:17.720206 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/891d0c70-b45a-447b-b570-5630e28a6799-kube-api-access-tch4p" (OuterVolumeSpecName: "kube-api-access-tch4p") pod "891d0c70-b45a-447b-b570-5630e28a6799" (UID: "891d0c70-b45a-447b-b570-5630e28a6799"). InnerVolumeSpecName "kube-api-access-tch4p". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 12:36:17 crc kubenswrapper[5002]: I0930 12:36:17.817148 5002 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/891d0c70-b45a-447b-b570-5630e28a6799-var-run\") on node \"crc\" DevicePath \"\"" Sep 30 12:36:17 crc kubenswrapper[5002]: I0930 12:36:17.817186 5002 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/891d0c70-b45a-447b-b570-5630e28a6799-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 12:36:17 crc kubenswrapper[5002]: I0930 12:36:17.817196 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tch4p\" (UniqueName: \"kubernetes.io/projected/891d0c70-b45a-447b-b570-5630e28a6799-kube-api-access-tch4p\") on node \"crc\" DevicePath \"\"" Sep 30 12:36:17 crc kubenswrapper[5002]: I0930 12:36:17.817207 5002 reconciler_common.go:293] "Volume detached for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/891d0c70-b45a-447b-b570-5630e28a6799-additional-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 12:36:17 crc kubenswrapper[5002]: I0930 12:36:17.817216 5002 reconciler_common.go:293] "Volume detached for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/891d0c70-b45a-447b-b570-5630e28a6799-var-run-ovn\") on node \"crc\" DevicePath \"\"" Sep 30 12:36:18 crc kubenswrapper[5002]: I0930 12:36:18.360113 5002 generic.go:334] "Generic (PLEG): container finished" podID="cd808dd7-6b41-4208-8f6e-c54bea4d22c5" containerID="0f63e3fb4af0a8e9d7d550e840712ee1e1f5cb91aff5bf9e33d0f769c364c436" exitCode=0 Sep 30 12:36:18 crc kubenswrapper[5002]: I0930 12:36:18.360169 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/dnsmasq-dns-77585f5f8c-6kcs9" event={"ID":"cd808dd7-6b41-4208-8f6e-c54bea4d22c5","Type":"ContainerDied","Data":"0f63e3fb4af0a8e9d7d550e840712ee1e1f5cb91aff5bf9e33d0f769c364c436"} Sep 30 12:36:18 crc kubenswrapper[5002]: I0930 12:36:18.360227 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-77585f5f8c-6kcs9" event={"ID":"cd808dd7-6b41-4208-8f6e-c54bea4d22c5","Type":"ContainerStarted","Data":"ca0313ab28b30572e70f6693636759d6299b7e4e7c54c628545681b4afe9b858"} Sep 30 12:36:18 crc kubenswrapper[5002]: I0930 12:36:18.362189 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-qq5zn-config-vk22c" event={"ID":"891d0c70-b45a-447b-b570-5630e28a6799","Type":"ContainerDied","Data":"b8303e9990dbf469c5dc7a5f305afad4d4082b99dee28298bc35109970318fcf"} Sep 30 12:36:18 crc kubenswrapper[5002]: I0930 12:36:18.362556 5002 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b8303e9990dbf469c5dc7a5f305afad4d4082b99dee28298bc35109970318fcf" Sep 30 12:36:18 crc kubenswrapper[5002]: I0930 12:36:18.362235 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-qq5zn-config-vk22c" Sep 30 12:36:18 crc kubenswrapper[5002]: I0930 12:36:18.694384 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-qq5zn-config-vk22c"] Sep 30 12:36:18 crc kubenswrapper[5002]: I0930 12:36:18.700358 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-qq5zn-config-vk22c"] Sep 30 12:36:18 crc kubenswrapper[5002]: I0930 12:36:18.917205 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-sync-cpvxh"] Sep 30 12:36:18 crc kubenswrapper[5002]: E0930 12:36:18.918168 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="891d0c70-b45a-447b-b570-5630e28a6799" containerName="ovn-config" Sep 30 12:36:18 crc kubenswrapper[5002]: I0930 12:36:18.918295 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="891d0c70-b45a-447b-b570-5630e28a6799" containerName="ovn-config" Sep 30 12:36:18 crc kubenswrapper[5002]: E0930 12:36:18.918372 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="199b9416-df82-435d-b362-1624462e7e79" containerName="mariadb-account-create" Sep 30 12:36:18 crc kubenswrapper[5002]: I0930 12:36:18.918457 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="199b9416-df82-435d-b362-1624462e7e79" containerName="mariadb-account-create" Sep 30 12:36:18 crc kubenswrapper[5002]: I0930 12:36:18.918753 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="199b9416-df82-435d-b362-1624462e7e79" containerName="mariadb-account-create" Sep 30 12:36:18 crc kubenswrapper[5002]: I0930 12:36:18.918849 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="891d0c70-b45a-447b-b570-5630e28a6799" containerName="ovn-config" Sep 30 12:36:18 crc kubenswrapper[5002]: I0930 12:36:18.919776 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-sync-cpvxh" Sep 30 12:36:18 crc kubenswrapper[5002]: I0930 12:36:18.923071 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-config-data" Sep 30 12:36:18 crc kubenswrapper[5002]: I0930 12:36:18.923361 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-4pcks" Sep 30 12:36:18 crc kubenswrapper[5002]: I0930 12:36:18.927192 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-qq5zn" Sep 30 12:36:18 crc kubenswrapper[5002]: I0930 12:36:18.932733 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-cpvxh"] Sep 30 12:36:18 crc kubenswrapper[5002]: I0930 12:36:18.958061 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/76838250-6394-4471-8009-e204115dc84c-config-data\") pod \"glance-db-sync-cpvxh\" (UID: \"76838250-6394-4471-8009-e204115dc84c\") " pod="openstack/glance-db-sync-cpvxh" Sep 30 12:36:18 crc kubenswrapper[5002]: I0930 12:36:18.958160 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/76838250-6394-4471-8009-e204115dc84c-combined-ca-bundle\") pod \"glance-db-sync-cpvxh\" (UID: \"76838250-6394-4471-8009-e204115dc84c\") " pod="openstack/glance-db-sync-cpvxh" Sep 30 12:36:18 crc kubenswrapper[5002]: I0930 12:36:18.959102 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t5pjj\" (UniqueName: \"kubernetes.io/projected/76838250-6394-4471-8009-e204115dc84c-kube-api-access-t5pjj\") pod \"glance-db-sync-cpvxh\" (UID: \"76838250-6394-4471-8009-e204115dc84c\") " pod="openstack/glance-db-sync-cpvxh" Sep 30 12:36:18 crc kubenswrapper[5002]: I0930 12:36:18.959196 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/76838250-6394-4471-8009-e204115dc84c-db-sync-config-data\") pod \"glance-db-sync-cpvxh\" (UID: \"76838250-6394-4471-8009-e204115dc84c\") " pod="openstack/glance-db-sync-cpvxh" Sep 30 12:36:19 crc kubenswrapper[5002]: I0930 12:36:19.060569 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/76838250-6394-4471-8009-e204115dc84c-combined-ca-bundle\") pod \"glance-db-sync-cpvxh\" (UID: \"76838250-6394-4471-8009-e204115dc84c\") " pod="openstack/glance-db-sync-cpvxh" Sep 30 12:36:19 crc kubenswrapper[5002]: I0930 12:36:19.060719 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t5pjj\" (UniqueName: \"kubernetes.io/projected/76838250-6394-4471-8009-e204115dc84c-kube-api-access-t5pjj\") pod \"glance-db-sync-cpvxh\" (UID: \"76838250-6394-4471-8009-e204115dc84c\") " pod="openstack/glance-db-sync-cpvxh" Sep 30 12:36:19 crc kubenswrapper[5002]: I0930 12:36:19.060771 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/76838250-6394-4471-8009-e204115dc84c-db-sync-config-data\") pod \"glance-db-sync-cpvxh\" (UID: \"76838250-6394-4471-8009-e204115dc84c\") " pod="openstack/glance-db-sync-cpvxh" Sep 30 12:36:19 crc kubenswrapper[5002]: I0930 12:36:19.060806 5002 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/76838250-6394-4471-8009-e204115dc84c-config-data\") pod \"glance-db-sync-cpvxh\" (UID: \"76838250-6394-4471-8009-e204115dc84c\") " pod="openstack/glance-db-sync-cpvxh" Sep 30 12:36:19 crc kubenswrapper[5002]: I0930 12:36:19.064855 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/76838250-6394-4471-8009-e204115dc84c-config-data\") pod \"glance-db-sync-cpvxh\" (UID: \"76838250-6394-4471-8009-e204115dc84c\") " pod="openstack/glance-db-sync-cpvxh" Sep 30 12:36:19 crc kubenswrapper[5002]: I0930 12:36:19.064986 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/76838250-6394-4471-8009-e204115dc84c-combined-ca-bundle\") pod \"glance-db-sync-cpvxh\" (UID: \"76838250-6394-4471-8009-e204115dc84c\") " pod="openstack/glance-db-sync-cpvxh" Sep 30 12:36:19 crc kubenswrapper[5002]: I0930 12:36:19.065214 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/76838250-6394-4471-8009-e204115dc84c-db-sync-config-data\") pod \"glance-db-sync-cpvxh\" (UID: \"76838250-6394-4471-8009-e204115dc84c\") " pod="openstack/glance-db-sync-cpvxh" Sep 30 12:36:19 crc kubenswrapper[5002]: I0930 12:36:19.085849 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t5pjj\" (UniqueName: \"kubernetes.io/projected/76838250-6394-4471-8009-e204115dc84c-kube-api-access-t5pjj\") pod \"glance-db-sync-cpvxh\" (UID: \"76838250-6394-4471-8009-e204115dc84c\") " pod="openstack/glance-db-sync-cpvxh" Sep 30 12:36:19 crc kubenswrapper[5002]: I0930 12:36:19.280507 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-sync-cpvxh" Sep 30 12:36:19 crc kubenswrapper[5002]: I0930 12:36:19.383179 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-77585f5f8c-6kcs9" event={"ID":"cd808dd7-6b41-4208-8f6e-c54bea4d22c5","Type":"ContainerStarted","Data":"7258208ab5a087fe79abec802c051a0fafc3d3516365918b10268067678c5af2"} Sep 30 12:36:19 crc kubenswrapper[5002]: I0930 12:36:19.384550 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-77585f5f8c-6kcs9" Sep 30 12:36:19 crc kubenswrapper[5002]: I0930 12:36:19.406671 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-77585f5f8c-6kcs9" podStartSLOduration=3.406652315 podStartE2EDuration="3.406652315s" podCreationTimestamp="2025-09-30 12:36:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 12:36:19.400359114 +0000 UTC m=+953.650041280" watchObservedRunningTime="2025-09-30 12:36:19.406652315 +0000 UTC m=+953.656334461" Sep 30 12:36:19 crc kubenswrapper[5002]: I0930 12:36:19.801675 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-cpvxh"] Sep 30 12:36:20 crc kubenswrapper[5002]: I0930 12:36:20.393106 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-cpvxh" event={"ID":"76838250-6394-4471-8009-e204115dc84c","Type":"ContainerStarted","Data":"e90abbd4ee54cdfd11c1f971802650e3b7c2fcda5fe27df8d0694656243b037d"} Sep 30 12:36:20 crc kubenswrapper[5002]: I0930 12:36:20.685426 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="891d0c70-b45a-447b-b570-5630e28a6799" path="/var/lib/kubelet/pods/891d0c70-b45a-447b-b570-5630e28a6799/volumes" Sep 30 12:36:24 crc kubenswrapper[5002]: I0930 12:36:24.953688 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell1-server-0" Sep 30 12:36:25 crc kubenswrapper[5002]: I0930 12:36:25.265531 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-0" Sep 30 12:36:26 crc kubenswrapper[5002]: I0930 12:36:26.702795 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-create-crf9m"] Sep 30 12:36:26 crc kubenswrapper[5002]: I0930 12:36:26.703780 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-crf9m" Sep 30 12:36:26 crc kubenswrapper[5002]: I0930 12:36:26.732378 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-crf9m"] Sep 30 12:36:26 crc kubenswrapper[5002]: I0930 12:36:26.767479 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-create-lz6gs"] Sep 30 12:36:26 crc kubenswrapper[5002]: I0930 12:36:26.768610 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-create-lz6gs" Sep 30 12:36:26 crc kubenswrapper[5002]: I0930 12:36:26.774854 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-lz6gs"] Sep 30 12:36:26 crc kubenswrapper[5002]: I0930 12:36:26.792649 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4c7mv\" (UniqueName: \"kubernetes.io/projected/aeb00f00-434a-44d3-aa16-2ea38921064c-kube-api-access-4c7mv\") pod \"cinder-db-create-crf9m\" (UID: \"aeb00f00-434a-44d3-aa16-2ea38921064c\") " pod="openstack/cinder-db-create-crf9m" Sep 30 12:36:26 crc kubenswrapper[5002]: I0930 12:36:26.792803 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4x6nx\" (UniqueName: \"kubernetes.io/projected/b3fe9aed-b73f-410d-a50f-dbbb964f3a8c-kube-api-access-4x6nx\") pod \"barbican-db-create-lz6gs\" (UID: \"b3fe9aed-b73f-410d-a50f-dbbb964f3a8c\") " pod="openstack/barbican-db-create-lz6gs" Sep 30 12:36:26 crc kubenswrapper[5002]: I0930 12:36:26.900015 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4x6nx\" (UniqueName: \"kubernetes.io/projected/b3fe9aed-b73f-410d-a50f-dbbb964f3a8c-kube-api-access-4x6nx\") pod \"barbican-db-create-lz6gs\" (UID: \"b3fe9aed-b73f-410d-a50f-dbbb964f3a8c\") " pod="openstack/barbican-db-create-lz6gs" Sep 30 12:36:26 crc kubenswrapper[5002]: I0930 12:36:26.900104 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4c7mv\" (UniqueName: \"kubernetes.io/projected/aeb00f00-434a-44d3-aa16-2ea38921064c-kube-api-access-4c7mv\") pod \"cinder-db-create-crf9m\" (UID: \"aeb00f00-434a-44d3-aa16-2ea38921064c\") " pod="openstack/cinder-db-create-crf9m" Sep 30 12:36:26 crc kubenswrapper[5002]: I0930 12:36:26.920077 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4c7mv\" (UniqueName: \"kubernetes.io/projected/aeb00f00-434a-44d3-aa16-2ea38921064c-kube-api-access-4c7mv\") pod \"cinder-db-create-crf9m\" (UID: \"aeb00f00-434a-44d3-aa16-2ea38921064c\") " pod="openstack/cinder-db-create-crf9m" Sep 30 12:36:26 crc kubenswrapper[5002]: I0930 12:36:26.920432 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4x6nx\" (UniqueName: \"kubernetes.io/projected/b3fe9aed-b73f-410d-a50f-dbbb964f3a8c-kube-api-access-4x6nx\") pod \"barbican-db-create-lz6gs\" (UID: \"b3fe9aed-b73f-410d-a50f-dbbb964f3a8c\") " pod="openstack/barbican-db-create-lz6gs" Sep 30 12:36:26 crc kubenswrapper[5002]: I0930 12:36:26.979188 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-create-wq6pr"] Sep 30 12:36:26 crc kubenswrapper[5002]: I0930 12:36:26.980281 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-create-wq6pr" Sep 30 12:36:26 crc kubenswrapper[5002]: I0930 12:36:26.989541 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-77585f5f8c-6kcs9" Sep 30 12:36:27 crc kubenswrapper[5002]: I0930 12:36:27.001731 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wn4vk\" (UniqueName: \"kubernetes.io/projected/d006d9db-f5bd-4a8e-b09e-3befefe10c2a-kube-api-access-wn4vk\") pod \"neutron-db-create-wq6pr\" (UID: \"d006d9db-f5bd-4a8e-b09e-3befefe10c2a\") " pod="openstack/neutron-db-create-wq6pr" Sep 30 12:36:27 crc kubenswrapper[5002]: I0930 12:36:27.009011 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-wq6pr"] Sep 30 12:36:27 crc kubenswrapper[5002]: I0930 12:36:27.027521 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-crf9m" Sep 30 12:36:27 crc kubenswrapper[5002]: I0930 12:36:27.036786 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-sync-2xqnk"] Sep 30 12:36:27 crc kubenswrapper[5002]: I0930 12:36:27.037780 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-2xqnk" Sep 30 12:36:27 crc kubenswrapper[5002]: I0930 12:36:27.043536 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Sep 30 12:36:27 crc kubenswrapper[5002]: I0930 12:36:27.043735 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Sep 30 12:36:27 crc kubenswrapper[5002]: I0930 12:36:27.043858 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Sep 30 12:36:27 crc kubenswrapper[5002]: I0930 12:36:27.043882 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-n9gzw" Sep 30 12:36:27 crc kubenswrapper[5002]: I0930 12:36:27.055069 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-2xqnk"] Sep 30 12:36:27 crc kubenswrapper[5002]: I0930 12:36:27.083950 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-698758b865-w59f2"] Sep 30 12:36:27 crc kubenswrapper[5002]: I0930 12:36:27.084218 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-698758b865-w59f2" podUID="e48a90f9-2cc4-41fc-a164-301c62c34023" containerName="dnsmasq-dns" containerID="cri-o://59f35a48ce5079f5ab231cc92ca7cfe31fdbe753184bda03958a9823305efec5" gracePeriod=10 Sep 30 12:36:27 crc kubenswrapper[5002]: I0930 12:36:27.092688 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-create-lz6gs" Sep 30 12:36:27 crc kubenswrapper[5002]: I0930 12:36:27.103745 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e2a8ad8b-59e6-407f-b634-38dc40374764-config-data\") pod \"keystone-db-sync-2xqnk\" (UID: \"e2a8ad8b-59e6-407f-b634-38dc40374764\") " pod="openstack/keystone-db-sync-2xqnk" Sep 30 12:36:27 crc kubenswrapper[5002]: I0930 12:36:27.103838 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7tkjn\" (UniqueName: \"kubernetes.io/projected/e2a8ad8b-59e6-407f-b634-38dc40374764-kube-api-access-7tkjn\") pod \"keystone-db-sync-2xqnk\" (UID: \"e2a8ad8b-59e6-407f-b634-38dc40374764\") " pod="openstack/keystone-db-sync-2xqnk" Sep 30 12:36:27 crc kubenswrapper[5002]: I0930 12:36:27.103921 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e2a8ad8b-59e6-407f-b634-38dc40374764-combined-ca-bundle\") pod \"keystone-db-sync-2xqnk\" (UID: \"e2a8ad8b-59e6-407f-b634-38dc40374764\") " pod="openstack/keystone-db-sync-2xqnk" Sep 30 12:36:27 crc kubenswrapper[5002]: I0930 12:36:27.104017 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wn4vk\" (UniqueName: \"kubernetes.io/projected/d006d9db-f5bd-4a8e-b09e-3befefe10c2a-kube-api-access-wn4vk\") pod \"neutron-db-create-wq6pr\" (UID: \"d006d9db-f5bd-4a8e-b09e-3befefe10c2a\") " pod="openstack/neutron-db-create-wq6pr" Sep 30 12:36:27 crc kubenswrapper[5002]: I0930 12:36:27.135144 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wn4vk\" (UniqueName: \"kubernetes.io/projected/d006d9db-f5bd-4a8e-b09e-3befefe10c2a-kube-api-access-wn4vk\") pod \"neutron-db-create-wq6pr\" (UID: \"d006d9db-f5bd-4a8e-b09e-3befefe10c2a\") " pod="openstack/neutron-db-create-wq6pr" Sep 30 12:36:27 crc kubenswrapper[5002]: I0930 12:36:27.205336 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e2a8ad8b-59e6-407f-b634-38dc40374764-config-data\") pod \"keystone-db-sync-2xqnk\" (UID: \"e2a8ad8b-59e6-407f-b634-38dc40374764\") " pod="openstack/keystone-db-sync-2xqnk" Sep 30 12:36:27 crc kubenswrapper[5002]: I0930 12:36:27.205388 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7tkjn\" (UniqueName: \"kubernetes.io/projected/e2a8ad8b-59e6-407f-b634-38dc40374764-kube-api-access-7tkjn\") pod \"keystone-db-sync-2xqnk\" (UID: \"e2a8ad8b-59e6-407f-b634-38dc40374764\") " pod="openstack/keystone-db-sync-2xqnk" Sep 30 12:36:27 crc kubenswrapper[5002]: I0930 12:36:27.205443 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e2a8ad8b-59e6-407f-b634-38dc40374764-combined-ca-bundle\") pod \"keystone-db-sync-2xqnk\" (UID: \"e2a8ad8b-59e6-407f-b634-38dc40374764\") " pod="openstack/keystone-db-sync-2xqnk" Sep 30 12:36:27 crc kubenswrapper[5002]: I0930 12:36:27.208711 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e2a8ad8b-59e6-407f-b634-38dc40374764-config-data\") pod \"keystone-db-sync-2xqnk\" (UID: \"e2a8ad8b-59e6-407f-b634-38dc40374764\") " pod="openstack/keystone-db-sync-2xqnk" Sep 30 
12:36:27 crc kubenswrapper[5002]: I0930 12:36:27.209755 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e2a8ad8b-59e6-407f-b634-38dc40374764-combined-ca-bundle\") pod \"keystone-db-sync-2xqnk\" (UID: \"e2a8ad8b-59e6-407f-b634-38dc40374764\") " pod="openstack/keystone-db-sync-2xqnk" Sep 30 12:36:27 crc kubenswrapper[5002]: I0930 12:36:27.226548 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7tkjn\" (UniqueName: \"kubernetes.io/projected/e2a8ad8b-59e6-407f-b634-38dc40374764-kube-api-access-7tkjn\") pod \"keystone-db-sync-2xqnk\" (UID: \"e2a8ad8b-59e6-407f-b634-38dc40374764\") " pod="openstack/keystone-db-sync-2xqnk" Sep 30 12:36:27 crc kubenswrapper[5002]: I0930 12:36:27.312291 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-wq6pr" Sep 30 12:36:27 crc kubenswrapper[5002]: I0930 12:36:27.379546 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-2xqnk" Sep 30 12:36:27 crc kubenswrapper[5002]: I0930 12:36:27.447531 5002 generic.go:334] "Generic (PLEG): container finished" podID="e48a90f9-2cc4-41fc-a164-301c62c34023" containerID="59f35a48ce5079f5ab231cc92ca7cfe31fdbe753184bda03958a9823305efec5" exitCode=0 Sep 30 12:36:27 crc kubenswrapper[5002]: I0930 12:36:27.447569 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-698758b865-w59f2" event={"ID":"e48a90f9-2cc4-41fc-a164-301c62c34023","Type":"ContainerDied","Data":"59f35a48ce5079f5ab231cc92ca7cfe31fdbe753184bda03958a9823305efec5"} Sep 30 12:36:30 crc kubenswrapper[5002]: I0930 12:36:30.721330 5002 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-698758b865-w59f2" podUID="e48a90f9-2cc4-41fc-a164-301c62c34023" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.114:5353: connect: connection refused" Sep 30 12:36:31 crc kubenswrapper[5002]: I0930 12:36:31.207369 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-698758b865-w59f2"
Sep 30 12:36:31 crc kubenswrapper[5002]: I0930 12:36:31.282159 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hsl9g\" (UniqueName: \"kubernetes.io/projected/e48a90f9-2cc4-41fc-a164-301c62c34023-kube-api-access-hsl9g\") pod \"e48a90f9-2cc4-41fc-a164-301c62c34023\" (UID: \"e48a90f9-2cc4-41fc-a164-301c62c34023\") "
Sep 30 12:36:31 crc kubenswrapper[5002]: I0930 12:36:31.282443 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e48a90f9-2cc4-41fc-a164-301c62c34023-dns-svc\") pod \"e48a90f9-2cc4-41fc-a164-301c62c34023\" (UID: \"e48a90f9-2cc4-41fc-a164-301c62c34023\") "
Sep 30 12:36:31 crc kubenswrapper[5002]: I0930 12:36:31.282754 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e48a90f9-2cc4-41fc-a164-301c62c34023-ovsdbserver-sb\") pod \"e48a90f9-2cc4-41fc-a164-301c62c34023\" (UID: \"e48a90f9-2cc4-41fc-a164-301c62c34023\") "
Sep 30 12:36:31 crc kubenswrapper[5002]: I0930 12:36:31.282854 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e48a90f9-2cc4-41fc-a164-301c62c34023-config\") pod \"e48a90f9-2cc4-41fc-a164-301c62c34023\" (UID: \"e48a90f9-2cc4-41fc-a164-301c62c34023\") "
Sep 30 12:36:31 crc kubenswrapper[5002]: I0930 12:36:31.282886 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e48a90f9-2cc4-41fc-a164-301c62c34023-ovsdbserver-nb\") pod \"e48a90f9-2cc4-41fc-a164-301c62c34023\" (UID: \"e48a90f9-2cc4-41fc-a164-301c62c34023\") "
Sep 30 12:36:31 crc kubenswrapper[5002]: I0930 12:36:31.286393 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e48a90f9-2cc4-41fc-a164-301c62c34023-kube-api-access-hsl9g" (OuterVolumeSpecName: "kube-api-access-hsl9g") pod "e48a90f9-2cc4-41fc-a164-301c62c34023" (UID: "e48a90f9-2cc4-41fc-a164-301c62c34023"). InnerVolumeSpecName "kube-api-access-hsl9g". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 12:36:31 crc kubenswrapper[5002]: I0930 12:36:31.325930 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e48a90f9-2cc4-41fc-a164-301c62c34023-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "e48a90f9-2cc4-41fc-a164-301c62c34023" (UID: "e48a90f9-2cc4-41fc-a164-301c62c34023"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 30 12:36:31 crc kubenswrapper[5002]: I0930 12:36:31.326080 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e48a90f9-2cc4-41fc-a164-301c62c34023-config" (OuterVolumeSpecName: "config") pod "e48a90f9-2cc4-41fc-a164-301c62c34023" (UID: "e48a90f9-2cc4-41fc-a164-301c62c34023"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 30 12:36:31 crc kubenswrapper[5002]: I0930 12:36:31.328969 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e48a90f9-2cc4-41fc-a164-301c62c34023-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "e48a90f9-2cc4-41fc-a164-301c62c34023" (UID: "e48a90f9-2cc4-41fc-a164-301c62c34023"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 30 12:36:31 crc kubenswrapper[5002]: I0930 12:36:31.337083 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e48a90f9-2cc4-41fc-a164-301c62c34023-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "e48a90f9-2cc4-41fc-a164-301c62c34023" (UID: "e48a90f9-2cc4-41fc-a164-301c62c34023"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 30 12:36:31 crc kubenswrapper[5002]: I0930 12:36:31.384711 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hsl9g\" (UniqueName: \"kubernetes.io/projected/e48a90f9-2cc4-41fc-a164-301c62c34023-kube-api-access-hsl9g\") on node \"crc\" DevicePath \"\""
Sep 30 12:36:31 crc kubenswrapper[5002]: I0930 12:36:31.384746 5002 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e48a90f9-2cc4-41fc-a164-301c62c34023-dns-svc\") on node \"crc\" DevicePath \"\""
Sep 30 12:36:31 crc kubenswrapper[5002]: I0930 12:36:31.384759 5002 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e48a90f9-2cc4-41fc-a164-301c62c34023-ovsdbserver-sb\") on node \"crc\" DevicePath \"\""
Sep 30 12:36:31 crc kubenswrapper[5002]: I0930 12:36:31.384771 5002 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e48a90f9-2cc4-41fc-a164-301c62c34023-config\") on node \"crc\" DevicePath \"\""
Sep 30 12:36:31 crc kubenswrapper[5002]: I0930 12:36:31.384783 5002 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e48a90f9-2cc4-41fc-a164-301c62c34023-ovsdbserver-nb\") on node \"crc\" DevicePath \"\""
Sep 30 12:36:31 crc kubenswrapper[5002]: I0930 12:36:31.499051 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-2xqnk"]
Sep 30 12:36:31 crc kubenswrapper[5002]: W0930 12:36:31.499275 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode2a8ad8b_59e6_407f_b634_38dc40374764.slice/crio-effdf52fac7595d41603f6a2ab7739f7f1abd0de2ece68d4b5abcb7b950907c7 WatchSource:0}: Error finding container effdf52fac7595d41603f6a2ab7739f7f1abd0de2ece68d4b5abcb7b950907c7: Status 404 returned error can't find the container with id effdf52fac7595d41603f6a2ab7739f7f1abd0de2ece68d4b5abcb7b950907c7
Sep 30 12:36:31 crc kubenswrapper[5002]: I0930 12:36:31.502731 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-698758b865-w59f2" event={"ID":"e48a90f9-2cc4-41fc-a164-301c62c34023","Type":"ContainerDied","Data":"4c7cf39b6af990c1854b42c1835d3f914ff0d8b007a3b320c9689c2523f5acde"}
Sep 30 12:36:31 crc kubenswrapper[5002]: I0930 12:36:31.502792 5002 scope.go:117] "RemoveContainer" containerID="59f35a48ce5079f5ab231cc92ca7cfe31fdbe753184bda03958a9823305efec5"
Sep 30 12:36:31 crc kubenswrapper[5002]: I0930 12:36:31.502936 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-698758b865-w59f2"
Sep 30 12:36:31 crc kubenswrapper[5002]: I0930 12:36:31.507050 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-wq6pr"]
Sep 30 12:36:31 crc kubenswrapper[5002]: I0930 12:36:31.513814 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-crf9m"]
Sep 30 12:36:31 crc kubenswrapper[5002]: I0930 12:36:31.534850 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-698758b865-w59f2"]
Sep 30 12:36:31 crc kubenswrapper[5002]: I0930 12:36:31.541736 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-698758b865-w59f2"]
Sep 30 12:36:31 crc kubenswrapper[5002]: I0930 12:36:31.542757 5002 scope.go:117] "RemoveContainer" containerID="1dd428e29180bb044310dd229679d4a3dce7cfdab24e1d42117ed07943d20601"
Sep 30 12:36:31 crc kubenswrapper[5002]: I0930 12:36:31.645569 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-lz6gs"]
Sep 30 12:36:31 crc kubenswrapper[5002]: W0930 12:36:31.650998 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb3fe9aed_b73f_410d_a50f_dbbb964f3a8c.slice/crio-2ca8dc21cb129e027ff3053781552710c5c23fec82a2dd7148c06316fc10bbd9 WatchSource:0}: Error finding container 2ca8dc21cb129e027ff3053781552710c5c23fec82a2dd7148c06316fc10bbd9: Status 404 returned error can't find the container with id 2ca8dc21cb129e027ff3053781552710c5c23fec82a2dd7148c06316fc10bbd9
Sep 30 12:36:32 crc kubenswrapper[5002]: I0930 12:36:32.511919 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-2xqnk" event={"ID":"e2a8ad8b-59e6-407f-b634-38dc40374764","Type":"ContainerStarted","Data":"effdf52fac7595d41603f6a2ab7739f7f1abd0de2ece68d4b5abcb7b950907c7"}
Sep 30 12:36:32 crc kubenswrapper[5002]: I0930 12:36:32.515589 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-cpvxh" event={"ID":"76838250-6394-4471-8009-e204115dc84c","Type":"ContainerStarted","Data":"c62def32dc5b26d5fce6bf5e1d5c485cde81cbb6939baf1fee77af7a4c03cc63"}
Sep 30 12:36:32 crc kubenswrapper[5002]: I0930 12:36:32.520108 5002 generic.go:334] "Generic (PLEG): container finished" podID="b3fe9aed-b73f-410d-a50f-dbbb964f3a8c" containerID="4090f4eb041f661c6d248abe0600dc8051b07f9fa8e3d353c4686ba756a60352" exitCode=0
Sep 30 12:36:32 crc kubenswrapper[5002]: I0930 12:36:32.520362 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-lz6gs" event={"ID":"b3fe9aed-b73f-410d-a50f-dbbb964f3a8c","Type":"ContainerDied","Data":"4090f4eb041f661c6d248abe0600dc8051b07f9fa8e3d353c4686ba756a60352"}
Sep 30 12:36:32 crc kubenswrapper[5002]: I0930 12:36:32.520634 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-lz6gs" event={"ID":"b3fe9aed-b73f-410d-a50f-dbbb964f3a8c","Type":"ContainerStarted","Data":"2ca8dc21cb129e027ff3053781552710c5c23fec82a2dd7148c06316fc10bbd9"}
Sep 30 12:36:32 crc kubenswrapper[5002]: I0930 12:36:32.529603 5002 generic.go:334] "Generic (PLEG): container finished" podID="aeb00f00-434a-44d3-aa16-2ea38921064c" containerID="bc58330b98b69f3fe76c78e4c9bb9f5b212dd7f5f18d1d02b9b7a4234d078dd5" exitCode=0
Sep 30 12:36:32 crc kubenswrapper[5002]: I0930 12:36:32.529700 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-crf9m" event={"ID":"aeb00f00-434a-44d3-aa16-2ea38921064c","Type":"ContainerDied","Data":"bc58330b98b69f3fe76c78e4c9bb9f5b212dd7f5f18d1d02b9b7a4234d078dd5"}
Sep 30 12:36:32 crc kubenswrapper[5002]: I0930 12:36:32.529740 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-crf9m" event={"ID":"aeb00f00-434a-44d3-aa16-2ea38921064c","Type":"ContainerStarted","Data":"6b894db1ca93186dc303d307bbccecd5e84d7d7afba00fb33f381d3e28863e41"}
Sep 30 12:36:32 crc kubenswrapper[5002]: I0930 12:36:32.531927 5002 generic.go:334] "Generic (PLEG): container finished" podID="d006d9db-f5bd-4a8e-b09e-3befefe10c2a" containerID="10bc1990208a4da672704ee040a99cee3e574636e52d778178741af4c257aeab" exitCode=0
Sep 30 12:36:32 crc kubenswrapper[5002]: I0930 12:36:32.531965 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-wq6pr" event={"ID":"d006d9db-f5bd-4a8e-b09e-3befefe10c2a","Type":"ContainerDied","Data":"10bc1990208a4da672704ee040a99cee3e574636e52d778178741af4c257aeab"}
Sep 30 12:36:32 crc kubenswrapper[5002]: I0930 12:36:32.531983 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-wq6pr" event={"ID":"d006d9db-f5bd-4a8e-b09e-3befefe10c2a","Type":"ContainerStarted","Data":"4e2dde852f42100f5b1f888782d64c8b82108485c202012633b0fd9451c2a3ad"}
Sep 30 12:36:32 crc kubenswrapper[5002]: I0930 12:36:32.540845 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-db-sync-cpvxh" podStartSLOduration=3.255778803 podStartE2EDuration="14.540822333s" podCreationTimestamp="2025-09-30 12:36:18 +0000 UTC" firstStartedPulling="2025-09-30 12:36:19.805098615 +0000 UTC m=+954.054780761" lastFinishedPulling="2025-09-30 12:36:31.090142145 +0000 UTC m=+965.339824291" observedRunningTime="2025-09-30 12:36:32.535716273 +0000 UTC m=+966.785398439" watchObservedRunningTime="2025-09-30 12:36:32.540822333 +0000 UTC m=+966.790504479"
Sep 30 12:36:32 crc kubenswrapper[5002]: I0930 12:36:32.687376 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e48a90f9-2cc4-41fc-a164-301c62c34023" path="/var/lib/kubelet/pods/e48a90f9-2cc4-41fc-a164-301c62c34023/volumes"
Sep 30 12:36:35 crc kubenswrapper[5002]: I0930 12:36:35.510322 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-wq6pr"
Sep 30 12:36:35 crc kubenswrapper[5002]: I0930 12:36:35.528766 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-lz6gs"
Sep 30 12:36:35 crc kubenswrapper[5002]: I0930 12:36:35.568304 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-lz6gs"
Sep 30 12:36:35 crc kubenswrapper[5002]: I0930 12:36:35.568318 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-lz6gs" event={"ID":"b3fe9aed-b73f-410d-a50f-dbbb964f3a8c","Type":"ContainerDied","Data":"2ca8dc21cb129e027ff3053781552710c5c23fec82a2dd7148c06316fc10bbd9"}
Sep 30 12:36:35 crc kubenswrapper[5002]: I0930 12:36:35.568377 5002 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2ca8dc21cb129e027ff3053781552710c5c23fec82a2dd7148c06316fc10bbd9"
Sep 30 12:36:35 crc kubenswrapper[5002]: I0930 12:36:35.572911 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-crf9m" event={"ID":"aeb00f00-434a-44d3-aa16-2ea38921064c","Type":"ContainerDied","Data":"6b894db1ca93186dc303d307bbccecd5e84d7d7afba00fb33f381d3e28863e41"}
Sep 30 12:36:35 crc kubenswrapper[5002]: I0930 12:36:35.573697 5002 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6b894db1ca93186dc303d307bbccecd5e84d7d7afba00fb33f381d3e28863e41"
Sep 30 12:36:35 crc kubenswrapper[5002]: I0930 12:36:35.576189 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-wq6pr" event={"ID":"d006d9db-f5bd-4a8e-b09e-3befefe10c2a","Type":"ContainerDied","Data":"4e2dde852f42100f5b1f888782d64c8b82108485c202012633b0fd9451c2a3ad"}
Sep 30 12:36:35 crc kubenswrapper[5002]: I0930 12:36:35.576266 5002 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4e2dde852f42100f5b1f888782d64c8b82108485c202012633b0fd9451c2a3ad"
Sep 30 12:36:35 crc kubenswrapper[5002]: I0930 12:36:35.576305 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-wq6pr"
Sep 30 12:36:35 crc kubenswrapper[5002]: I0930 12:36:35.654462 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4x6nx\" (UniqueName: \"kubernetes.io/projected/b3fe9aed-b73f-410d-a50f-dbbb964f3a8c-kube-api-access-4x6nx\") pod \"b3fe9aed-b73f-410d-a50f-dbbb964f3a8c\" (UID: \"b3fe9aed-b73f-410d-a50f-dbbb964f3a8c\") "
Sep 30 12:36:35 crc kubenswrapper[5002]: I0930 12:36:35.654740 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wn4vk\" (UniqueName: \"kubernetes.io/projected/d006d9db-f5bd-4a8e-b09e-3befefe10c2a-kube-api-access-wn4vk\") pod \"d006d9db-f5bd-4a8e-b09e-3befefe10c2a\" (UID: \"d006d9db-f5bd-4a8e-b09e-3befefe10c2a\") "
Sep 30 12:36:35 crc kubenswrapper[5002]: I0930 12:36:35.658722 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b3fe9aed-b73f-410d-a50f-dbbb964f3a8c-kube-api-access-4x6nx" (OuterVolumeSpecName: "kube-api-access-4x6nx") pod "b3fe9aed-b73f-410d-a50f-dbbb964f3a8c" (UID: "b3fe9aed-b73f-410d-a50f-dbbb964f3a8c"). InnerVolumeSpecName "kube-api-access-4x6nx". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 12:36:35 crc kubenswrapper[5002]: I0930 12:36:35.660596 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d006d9db-f5bd-4a8e-b09e-3befefe10c2a-kube-api-access-wn4vk" (OuterVolumeSpecName: "kube-api-access-wn4vk") pod "d006d9db-f5bd-4a8e-b09e-3befefe10c2a" (UID: "d006d9db-f5bd-4a8e-b09e-3befefe10c2a"). InnerVolumeSpecName "kube-api-access-wn4vk". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 12:36:35 crc kubenswrapper[5002]: I0930 12:36:35.663097 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-crf9m"
Sep 30 12:36:35 crc kubenswrapper[5002]: I0930 12:36:35.757128 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wn4vk\" (UniqueName: \"kubernetes.io/projected/d006d9db-f5bd-4a8e-b09e-3befefe10c2a-kube-api-access-wn4vk\") on node \"crc\" DevicePath \"\""
Sep 30 12:36:35 crc kubenswrapper[5002]: I0930 12:36:35.757165 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4x6nx\" (UniqueName: \"kubernetes.io/projected/b3fe9aed-b73f-410d-a50f-dbbb964f3a8c-kube-api-access-4x6nx\") on node \"crc\" DevicePath \"\""
Sep 30 12:36:35 crc kubenswrapper[5002]: I0930 12:36:35.857877 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4c7mv\" (UniqueName: \"kubernetes.io/projected/aeb00f00-434a-44d3-aa16-2ea38921064c-kube-api-access-4c7mv\") pod \"aeb00f00-434a-44d3-aa16-2ea38921064c\" (UID: \"aeb00f00-434a-44d3-aa16-2ea38921064c\") "
Sep 30 12:36:35 crc kubenswrapper[5002]: I0930 12:36:35.863622 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/aeb00f00-434a-44d3-aa16-2ea38921064c-kube-api-access-4c7mv" (OuterVolumeSpecName: "kube-api-access-4c7mv") pod "aeb00f00-434a-44d3-aa16-2ea38921064c" (UID: "aeb00f00-434a-44d3-aa16-2ea38921064c"). InnerVolumeSpecName "kube-api-access-4c7mv". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 12:36:35 crc kubenswrapper[5002]: I0930 12:36:35.960154 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4c7mv\" (UniqueName: \"kubernetes.io/projected/aeb00f00-434a-44d3-aa16-2ea38921064c-kube-api-access-4c7mv\") on node \"crc\" DevicePath \"\""
Sep 30 12:36:36 crc kubenswrapper[5002]: I0930 12:36:36.586154 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-crf9m"
Sep 30 12:36:36 crc kubenswrapper[5002]: I0930 12:36:36.586146 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-2xqnk" event={"ID":"e2a8ad8b-59e6-407f-b634-38dc40374764","Type":"ContainerStarted","Data":"a782f97e1fa15c29109a4ce3a8bfd14304f0c32d00cdee3a86f5283b51fefa0e"}
Sep 30 12:36:36 crc kubenswrapper[5002]: I0930 12:36:36.604941 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-db-sync-2xqnk" podStartSLOduration=5.595960353 podStartE2EDuration="9.604917543s" podCreationTimestamp="2025-09-30 12:36:27 +0000 UTC" firstStartedPulling="2025-09-30 12:36:31.501804176 +0000 UTC m=+965.751486322" lastFinishedPulling="2025-09-30 12:36:35.510761346 +0000 UTC m=+969.760443512" observedRunningTime="2025-09-30 12:36:36.603224316 +0000 UTC m=+970.852906462" watchObservedRunningTime="2025-09-30 12:36:36.604917543 +0000 UTC m=+970.854599719"
Sep 30 12:36:38 crc kubenswrapper[5002]: I0930 12:36:38.637215 5002 generic.go:334] "Generic (PLEG): container finished" podID="76838250-6394-4471-8009-e204115dc84c" containerID="c62def32dc5b26d5fce6bf5e1d5c485cde81cbb6939baf1fee77af7a4c03cc63" exitCode=0
Sep 30 12:36:38 crc kubenswrapper[5002]: I0930 12:36:38.637403 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-cpvxh" event={"ID":"76838250-6394-4471-8009-e204115dc84c","Type":"ContainerDied","Data":"c62def32dc5b26d5fce6bf5e1d5c485cde81cbb6939baf1fee77af7a4c03cc63"}
Sep 30 12:36:39 crc kubenswrapper[5002]: I0930 12:36:39.656055 5002 generic.go:334] "Generic (PLEG): container finished" podID="e2a8ad8b-59e6-407f-b634-38dc40374764" containerID="a782f97e1fa15c29109a4ce3a8bfd14304f0c32d00cdee3a86f5283b51fefa0e" exitCode=0
Sep 30 12:36:39 crc kubenswrapper[5002]: I0930 12:36:39.656151 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-2xqnk" event={"ID":"e2a8ad8b-59e6-407f-b634-38dc40374764","Type":"ContainerDied","Data":"a782f97e1fa15c29109a4ce3a8bfd14304f0c32d00cdee3a86f5283b51fefa0e"}
Sep 30 12:36:40 crc kubenswrapper[5002]: I0930 12:36:40.165304 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-cpvxh"
Sep 30 12:36:40 crc kubenswrapper[5002]: I0930 12:36:40.329465 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/76838250-6394-4471-8009-e204115dc84c-combined-ca-bundle\") pod \"76838250-6394-4471-8009-e204115dc84c\" (UID: \"76838250-6394-4471-8009-e204115dc84c\") "
Sep 30 12:36:40 crc kubenswrapper[5002]: I0930 12:36:40.329545 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/76838250-6394-4471-8009-e204115dc84c-config-data\") pod \"76838250-6394-4471-8009-e204115dc84c\" (UID: \"76838250-6394-4471-8009-e204115dc84c\") "
Sep 30 12:36:40 crc kubenswrapper[5002]: I0930 12:36:40.329568 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-t5pjj\" (UniqueName: \"kubernetes.io/projected/76838250-6394-4471-8009-e204115dc84c-kube-api-access-t5pjj\") pod \"76838250-6394-4471-8009-e204115dc84c\" (UID: \"76838250-6394-4471-8009-e204115dc84c\") "
Sep 30 12:36:40 crc kubenswrapper[5002]: I0930 12:36:40.329602 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/76838250-6394-4471-8009-e204115dc84c-db-sync-config-data\") pod \"76838250-6394-4471-8009-e204115dc84c\" (UID: \"76838250-6394-4471-8009-e204115dc84c\") "
Sep 30 12:36:40 crc kubenswrapper[5002]: I0930 12:36:40.335206 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/76838250-6394-4471-8009-e204115dc84c-kube-api-access-t5pjj" (OuterVolumeSpecName: "kube-api-access-t5pjj") pod "76838250-6394-4471-8009-e204115dc84c" (UID: "76838250-6394-4471-8009-e204115dc84c"). InnerVolumeSpecName "kube-api-access-t5pjj". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 12:36:40 crc kubenswrapper[5002]: I0930 12:36:40.336078 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/76838250-6394-4471-8009-e204115dc84c-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "76838250-6394-4471-8009-e204115dc84c" (UID: "76838250-6394-4471-8009-e204115dc84c"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 12:36:40 crc kubenswrapper[5002]: I0930 12:36:40.367066 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/76838250-6394-4471-8009-e204115dc84c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "76838250-6394-4471-8009-e204115dc84c" (UID: "76838250-6394-4471-8009-e204115dc84c"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 12:36:40 crc kubenswrapper[5002]: I0930 12:36:40.371567 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/76838250-6394-4471-8009-e204115dc84c-config-data" (OuterVolumeSpecName: "config-data") pod "76838250-6394-4471-8009-e204115dc84c" (UID: "76838250-6394-4471-8009-e204115dc84c"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 12:36:40 crc kubenswrapper[5002]: I0930 12:36:40.431115 5002 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/76838250-6394-4471-8009-e204115dc84c-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Sep 30 12:36:40 crc kubenswrapper[5002]: I0930 12:36:40.431155 5002 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/76838250-6394-4471-8009-e204115dc84c-config-data\") on node \"crc\" DevicePath \"\""
Sep 30 12:36:40 crc kubenswrapper[5002]: I0930 12:36:40.431170 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-t5pjj\" (UniqueName: \"kubernetes.io/projected/76838250-6394-4471-8009-e204115dc84c-kube-api-access-t5pjj\") on node \"crc\" DevicePath \"\""
Sep 30 12:36:40 crc kubenswrapper[5002]: I0930 12:36:40.431184 5002 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/76838250-6394-4471-8009-e204115dc84c-db-sync-config-data\") on node \"crc\" DevicePath \"\""
Sep 30 12:36:40 crc kubenswrapper[5002]: I0930 12:36:40.671073 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-cpvxh"
Sep 30 12:36:40 crc kubenswrapper[5002]: I0930 12:36:40.671413 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-cpvxh" event={"ID":"76838250-6394-4471-8009-e204115dc84c","Type":"ContainerDied","Data":"e90abbd4ee54cdfd11c1f971802650e3b7c2fcda5fe27df8d0694656243b037d"}
Sep 30 12:36:40 crc kubenswrapper[5002]: I0930 12:36:40.672173 5002 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e90abbd4ee54cdfd11c1f971802650e3b7c2fcda5fe27df8d0694656243b037d"
Sep 30 12:36:41 crc kubenswrapper[5002]: I0930 12:36:41.017789 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-2xqnk"
Sep 30 12:36:41 crc kubenswrapper[5002]: I0930 12:36:41.074877 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-7ff5475cc9-g4bpt"]
Sep 30 12:36:41 crc kubenswrapper[5002]: E0930 12:36:41.075421 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e48a90f9-2cc4-41fc-a164-301c62c34023" containerName="init"
Sep 30 12:36:41 crc kubenswrapper[5002]: I0930 12:36:41.075436 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="e48a90f9-2cc4-41fc-a164-301c62c34023" containerName="init"
Sep 30 12:36:41 crc kubenswrapper[5002]: E0930 12:36:41.075449 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e48a90f9-2cc4-41fc-a164-301c62c34023" containerName="dnsmasq-dns"
Sep 30 12:36:41 crc kubenswrapper[5002]: I0930 12:36:41.075456 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="e48a90f9-2cc4-41fc-a164-301c62c34023" containerName="dnsmasq-dns"
Sep 30 12:36:41 crc kubenswrapper[5002]: E0930 12:36:41.075488 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aeb00f00-434a-44d3-aa16-2ea38921064c" containerName="mariadb-database-create"
Sep 30 12:36:41 crc kubenswrapper[5002]: I0930 12:36:41.079314 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="aeb00f00-434a-44d3-aa16-2ea38921064c" containerName="mariadb-database-create"
Sep 30 12:36:41 crc kubenswrapper[5002]: E0930 12:36:41.079341 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="76838250-6394-4471-8009-e204115dc84c" containerName="glance-db-sync"
Sep 30 12:36:41 crc kubenswrapper[5002]: I0930 12:36:41.079348 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="76838250-6394-4471-8009-e204115dc84c" containerName="glance-db-sync"
Sep 30 12:36:41 crc kubenswrapper[5002]: E0930 12:36:41.079356 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b3fe9aed-b73f-410d-a50f-dbbb964f3a8c" containerName="mariadb-database-create"
Sep 30 12:36:41 crc kubenswrapper[5002]: I0930 12:36:41.079364 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="b3fe9aed-b73f-410d-a50f-dbbb964f3a8c" containerName="mariadb-database-create"
Sep 30 12:36:41 crc kubenswrapper[5002]: E0930 12:36:41.079376 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d006d9db-f5bd-4a8e-b09e-3befefe10c2a" containerName="mariadb-database-create"
Sep 30 12:36:41 crc kubenswrapper[5002]: I0930 12:36:41.079381 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="d006d9db-f5bd-4a8e-b09e-3befefe10c2a" containerName="mariadb-database-create"
Sep 30 12:36:41 crc kubenswrapper[5002]: E0930 12:36:41.079392 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e2a8ad8b-59e6-407f-b634-38dc40374764" containerName="keystone-db-sync"
Sep 30 12:36:41 crc kubenswrapper[5002]: I0930 12:36:41.079412 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="e2a8ad8b-59e6-407f-b634-38dc40374764" containerName="keystone-db-sync"
Sep 30 12:36:41 crc kubenswrapper[5002]: I0930 12:36:41.079641 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="d006d9db-f5bd-4a8e-b09e-3befefe10c2a" containerName="mariadb-database-create"
Sep 30 12:36:41 crc kubenswrapper[5002]: I0930 12:36:41.079661 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="76838250-6394-4471-8009-e204115dc84c" containerName="glance-db-sync"
Sep 30 12:36:41 crc kubenswrapper[5002]: I0930 12:36:41.079672 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="b3fe9aed-b73f-410d-a50f-dbbb964f3a8c" containerName="mariadb-database-create"
Sep 30 12:36:41 crc kubenswrapper[5002]: I0930 12:36:41.079681 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="aeb00f00-434a-44d3-aa16-2ea38921064c" containerName="mariadb-database-create"
Sep 30 12:36:41 crc kubenswrapper[5002]: I0930 12:36:41.079690 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="e2a8ad8b-59e6-407f-b634-38dc40374764" containerName="keystone-db-sync"
Sep 30 12:36:41 crc kubenswrapper[5002]: I0930 12:36:41.079705 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="e48a90f9-2cc4-41fc-a164-301c62c34023" containerName="dnsmasq-dns"
Sep 30 12:36:41 crc kubenswrapper[5002]: I0930 12:36:41.080520 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7ff5475cc9-g4bpt"
Sep 30 12:36:41 crc kubenswrapper[5002]: I0930 12:36:41.083346 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7ff5475cc9-g4bpt"]
Sep 30 12:36:41 crc kubenswrapper[5002]: I0930 12:36:41.146933 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e2a8ad8b-59e6-407f-b634-38dc40374764-config-data\") pod \"e2a8ad8b-59e6-407f-b634-38dc40374764\" (UID: \"e2a8ad8b-59e6-407f-b634-38dc40374764\") "
Sep 30 12:36:41 crc kubenswrapper[5002]: I0930 12:36:41.147009 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7tkjn\" (UniqueName: \"kubernetes.io/projected/e2a8ad8b-59e6-407f-b634-38dc40374764-kube-api-access-7tkjn\") pod \"e2a8ad8b-59e6-407f-b634-38dc40374764\" (UID: \"e2a8ad8b-59e6-407f-b634-38dc40374764\") "
Sep 30 12:36:41 crc kubenswrapper[5002]: I0930 12:36:41.147133 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e2a8ad8b-59e6-407f-b634-38dc40374764-combined-ca-bundle\") pod \"e2a8ad8b-59e6-407f-b634-38dc40374764\" (UID: \"e2a8ad8b-59e6-407f-b634-38dc40374764\") "
Sep 30 12:36:41 crc kubenswrapper[5002]: I0930 12:36:41.158661 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e2a8ad8b-59e6-407f-b634-38dc40374764-kube-api-access-7tkjn" (OuterVolumeSpecName: "kube-api-access-7tkjn") pod "e2a8ad8b-59e6-407f-b634-38dc40374764" (UID: "e2a8ad8b-59e6-407f-b634-38dc40374764"). InnerVolumeSpecName "kube-api-access-7tkjn". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 12:36:41 crc kubenswrapper[5002]: I0930 12:36:41.181339 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e2a8ad8b-59e6-407f-b634-38dc40374764-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e2a8ad8b-59e6-407f-b634-38dc40374764" (UID: "e2a8ad8b-59e6-407f-b634-38dc40374764"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 12:36:41 crc kubenswrapper[5002]: I0930 12:36:41.220234 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e2a8ad8b-59e6-407f-b634-38dc40374764-config-data" (OuterVolumeSpecName: "config-data") pod "e2a8ad8b-59e6-407f-b634-38dc40374764" (UID: "e2a8ad8b-59e6-407f-b634-38dc40374764"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 12:36:41 crc kubenswrapper[5002]: I0930 12:36:41.249321 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/137d5592-5ce3-4aa8-924c-d287bcd87d4c-config\") pod \"dnsmasq-dns-7ff5475cc9-g4bpt\" (UID: \"137d5592-5ce3-4aa8-924c-d287bcd87d4c\") " pod="openstack/dnsmasq-dns-7ff5475cc9-g4bpt"
Sep 30 12:36:41 crc kubenswrapper[5002]: I0930 12:36:41.249364 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/137d5592-5ce3-4aa8-924c-d287bcd87d4c-ovsdbserver-sb\") pod \"dnsmasq-dns-7ff5475cc9-g4bpt\" (UID: \"137d5592-5ce3-4aa8-924c-d287bcd87d4c\") " pod="openstack/dnsmasq-dns-7ff5475cc9-g4bpt"
Sep 30 12:36:41 crc kubenswrapper[5002]: I0930 12:36:41.249392 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/137d5592-5ce3-4aa8-924c-d287bcd87d4c-ovsdbserver-nb\") pod \"dnsmasq-dns-7ff5475cc9-g4bpt\" (UID: \"137d5592-5ce3-4aa8-924c-d287bcd87d4c\") " pod="openstack/dnsmasq-dns-7ff5475cc9-g4bpt"
Sep 30 12:36:41 crc kubenswrapper[5002]: I0930 12:36:41.249407 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-792q8\" (UniqueName: \"kubernetes.io/projected/137d5592-5ce3-4aa8-924c-d287bcd87d4c-kube-api-access-792q8\") pod \"dnsmasq-dns-7ff5475cc9-g4bpt\" (UID: \"137d5592-5ce3-4aa8-924c-d287bcd87d4c\") " pod="openstack/dnsmasq-dns-7ff5475cc9-g4bpt"
Sep 30 12:36:41 crc kubenswrapper[5002]: I0930 12:36:41.249456 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/137d5592-5ce3-4aa8-924c-d287bcd87d4c-dns-swift-storage-0\") pod \"dnsmasq-dns-7ff5475cc9-g4bpt\" (UID: \"137d5592-5ce3-4aa8-924c-d287bcd87d4c\") " pod="openstack/dnsmasq-dns-7ff5475cc9-g4bpt"
Sep 30 12:36:41 crc kubenswrapper[5002]: I0930 12:36:41.249533 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/137d5592-5ce3-4aa8-924c-d287bcd87d4c-dns-svc\") pod \"dnsmasq-dns-7ff5475cc9-g4bpt\" (UID: \"137d5592-5ce3-4aa8-924c-d287bcd87d4c\") " pod="openstack/dnsmasq-dns-7ff5475cc9-g4bpt"
Sep 30 12:36:41 crc kubenswrapper[5002]: I0930 12:36:41.249612 5002 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e2a8ad8b-59e6-407f-b634-38dc40374764-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Sep 30 12:36:41 crc kubenswrapper[5002]: I0930 12:36:41.249625 5002 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e2a8ad8b-59e6-407f-b634-38dc40374764-config-data\") on node \"crc\" DevicePath \"\""
Sep 30 12:36:41 crc kubenswrapper[5002]: I0930 12:36:41.249634 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7tkjn\" (UniqueName: \"kubernetes.io/projected/e2a8ad8b-59e6-407f-b634-38dc40374764-kube-api-access-7tkjn\") on node \"crc\" DevicePath \"\""
Sep 30 12:36:41 crc kubenswrapper[5002]: I0930 12:36:41.351343 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/137d5592-5ce3-4aa8-924c-d287bcd87d4c-dns-swift-storage-0\") pod \"dnsmasq-dns-7ff5475cc9-g4bpt\" (UID: \"137d5592-5ce3-4aa8-924c-d287bcd87d4c\") " pod="openstack/dnsmasq-dns-7ff5475cc9-g4bpt"
Sep 30 12:36:41 crc kubenswrapper[5002]: I0930 12:36:41.351415 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/137d5592-5ce3-4aa8-924c-d287bcd87d4c-dns-svc\") pod \"dnsmasq-dns-7ff5475cc9-g4bpt\" (UID: \"137d5592-5ce3-4aa8-924c-d287bcd87d4c\") " pod="openstack/dnsmasq-dns-7ff5475cc9-g4bpt"
Sep 30 12:36:41 crc kubenswrapper[5002]: I0930 12:36:41.351540 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/137d5592-5ce3-4aa8-924c-d287bcd87d4c-config\") pod \"dnsmasq-dns-7ff5475cc9-g4bpt\" (UID: \"137d5592-5ce3-4aa8-924c-d287bcd87d4c\") " pod="openstack/dnsmasq-dns-7ff5475cc9-g4bpt"
Sep 30 12:36:41 crc kubenswrapper[5002]: I0930 12:36:41.351567 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/137d5592-5ce3-4aa8-924c-d287bcd87d4c-ovsdbserver-sb\") pod \"dnsmasq-dns-7ff5475cc9-g4bpt\" (UID: \"137d5592-5ce3-4aa8-924c-d287bcd87d4c\") " pod="openstack/dnsmasq-dns-7ff5475cc9-g4bpt"
Sep 30 12:36:41 crc kubenswrapper[5002]: I0930 12:36:41.351600 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/137d5592-5ce3-4aa8-924c-d287bcd87d4c-ovsdbserver-nb\") pod \"dnsmasq-dns-7ff5475cc9-g4bpt\" (UID: \"137d5592-5ce3-4aa8-924c-d287bcd87d4c\") " pod="openstack/dnsmasq-dns-7ff5475cc9-g4bpt"
Sep 30 12:36:41 crc kubenswrapper[5002]: I0930 12:36:41.351620 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-792q8\" (UniqueName: \"kubernetes.io/projected/137d5592-5ce3-4aa8-924c-d287bcd87d4c-kube-api-access-792q8\") pod \"dnsmasq-dns-7ff5475cc9-g4bpt\" (UID: \"137d5592-5ce3-4aa8-924c-d287bcd87d4c\") " pod="openstack/dnsmasq-dns-7ff5475cc9-g4bpt"
Sep 30 12:36:41 crc kubenswrapper[5002]: I0930 12:36:41.352177 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/137d5592-5ce3-4aa8-924c-d287bcd87d4c-dns-swift-storage-0\") pod \"dnsmasq-dns-7ff5475cc9-g4bpt\" (UID: \"137d5592-5ce3-4aa8-924c-d287bcd87d4c\") " pod="openstack/dnsmasq-dns-7ff5475cc9-g4bpt"
Sep 30 12:36:41 crc kubenswrapper[5002]: I0930 12:36:41.352221 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/137d5592-5ce3-4aa8-924c-d287bcd87d4c-ovsdbserver-nb\") pod \"dnsmasq-dns-7ff5475cc9-g4bpt\" (UID: \"137d5592-5ce3-4aa8-924c-d287bcd87d4c\") " pod="openstack/dnsmasq-dns-7ff5475cc9-g4bpt"
Sep 30 12:36:41 crc kubenswrapper[5002]: I0930 12:36:41.352367 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/137d5592-5ce3-4aa8-924c-d287bcd87d4c-dns-svc\") pod \"dnsmasq-dns-7ff5475cc9-g4bpt\" (UID: \"137d5592-5ce3-4aa8-924c-d287bcd87d4c\") " pod="openstack/dnsmasq-dns-7ff5475cc9-g4bpt"
Sep 30 12:36:41 crc kubenswrapper[5002]: I0930 12:36:41.352509 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/137d5592-5ce3-4aa8-924c-d287bcd87d4c-config\") pod \"dnsmasq-dns-7ff5475cc9-g4bpt\" (UID: \"137d5592-5ce3-4aa8-924c-d287bcd87d4c\") " pod="openstack/dnsmasq-dns-7ff5475cc9-g4bpt"
Sep 30 12:36:41 crc kubenswrapper[5002]: I0930 12:36:41.353040 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/137d5592-5ce3-4aa8-924c-d287bcd87d4c-ovsdbserver-sb\") pod \"dnsmasq-dns-7ff5475cc9-g4bpt\" (UID: \"137d5592-5ce3-4aa8-924c-d287bcd87d4c\") " pod="openstack/dnsmasq-dns-7ff5475cc9-g4bpt"
Sep 30 12:36:41 crc kubenswrapper[5002]: I0930 12:36:41.367057 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-792q8\" (UniqueName: \"kubernetes.io/projected/137d5592-5ce3-4aa8-924c-d287bcd87d4c-kube-api-access-792q8\") pod \"dnsmasq-dns-7ff5475cc9-g4bpt\" (UID: \"137d5592-5ce3-4aa8-924c-d287bcd87d4c\") " pod="openstack/dnsmasq-dns-7ff5475cc9-g4bpt"
Sep 30 12:36:41 crc kubenswrapper[5002]: I0930 12:36:41.429663 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7ff5475cc9-g4bpt"
Sep 30 12:36:41 crc kubenswrapper[5002]: I0930 12:36:41.683241 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-2xqnk" event={"ID":"e2a8ad8b-59e6-407f-b634-38dc40374764","Type":"ContainerDied","Data":"effdf52fac7595d41603f6a2ab7739f7f1abd0de2ece68d4b5abcb7b950907c7"}
Sep 30 12:36:41 crc kubenswrapper[5002]: I0930 12:36:41.683585 5002 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="effdf52fac7595d41603f6a2ab7739f7f1abd0de2ece68d4b5abcb7b950907c7"
Sep 30 12:36:41 crc kubenswrapper[5002]: I0930 12:36:41.683296 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-2xqnk"
Sep 30 12:36:41 crc kubenswrapper[5002]: W0930 12:36:41.855979 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod137d5592_5ce3_4aa8_924c_d287bcd87d4c.slice/crio-dd5d8a509e680e3a8933eb36a5e2cc80ef58a1c308fa99534abb12cb58b4a130 WatchSource:0}: Error finding container dd5d8a509e680e3a8933eb36a5e2cc80ef58a1c308fa99534abb12cb58b4a130: Status 404 returned error can't find the container with id dd5d8a509e680e3a8933eb36a5e2cc80ef58a1c308fa99534abb12cb58b4a130
Sep 30 12:36:41 crc kubenswrapper[5002]: I0930 12:36:41.856157 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7ff5475cc9-g4bpt"]
Sep 30 12:36:41 crc kubenswrapper[5002]: I0930 12:36:41.982227 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7ff5475cc9-g4bpt"]
Sep 30 12:36:42 crc kubenswrapper[5002]: I0930 12:36:42.014242 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5c5cc7c5ff-j97p2"]
Sep 30 12:36:42 crc kubenswrapper[5002]: I0930 12:36:42.015851 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5c5cc7c5ff-j97p2"
Sep 30 12:36:42 crc kubenswrapper[5002]: I0930 12:36:42.038980 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5c5cc7c5ff-j97p2"]
Sep 30 12:36:42 crc kubenswrapper[5002]: I0930 12:36:42.067805 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/94030f2b-828a-4de2-9975-688abbf7e3e9-ovsdbserver-nb\") pod \"dnsmasq-dns-5c5cc7c5ff-j97p2\" (UID: \"94030f2b-828a-4de2-9975-688abbf7e3e9\") " pod="openstack/dnsmasq-dns-5c5cc7c5ff-j97p2"
Sep 30 12:36:42 crc kubenswrapper[5002]: I0930 12:36:42.067848 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/94030f2b-828a-4de2-9975-688abbf7e3e9-dns-swift-storage-0\") pod \"dnsmasq-dns-5c5cc7c5ff-j97p2\" (UID: \"94030f2b-828a-4de2-9975-688abbf7e3e9\") " pod="openstack/dnsmasq-dns-5c5cc7c5ff-j97p2"
Sep 30 12:36:42 crc kubenswrapper[5002]: I0930 12:36:42.067895 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/94030f2b-828a-4de2-9975-688abbf7e3e9-dns-svc\") pod \"dnsmasq-dns-5c5cc7c5ff-j97p2\" (UID: \"94030f2b-828a-4de2-9975-688abbf7e3e9\") " pod="openstack/dnsmasq-dns-5c5cc7c5ff-j97p2"
Sep 30 12:36:42 crc kubenswrapper[5002]: I0930 12:36:42.067925 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/94030f2b-828a-4de2-9975-688abbf7e3e9-config\") pod \"dnsmasq-dns-5c5cc7c5ff-j97p2\" (UID: \"94030f2b-828a-4de2-9975-688abbf7e3e9\") " pod="openstack/dnsmasq-dns-5c5cc7c5ff-j97p2"
Sep 30 12:36:42 crc kubenswrapper[5002]: I0930 12:36:42.067956 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/94030f2b-828a-4de2-9975-688abbf7e3e9-ovsdbserver-sb\") pod \"dnsmasq-dns-5c5cc7c5ff-j97p2\" (UID: \"94030f2b-828a-4de2-9975-688abbf7e3e9\") " pod="openstack/dnsmasq-dns-5c5cc7c5ff-j97p2"
Sep 30 12:36:42 crc kubenswrapper[5002]: I0930 12:36:42.068026 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nwdbl\" (UniqueName: \"kubernetes.io/projected/94030f2b-828a-4de2-9975-688abbf7e3e9-kube-api-access-nwdbl\") pod \"dnsmasq-dns-5c5cc7c5ff-j97p2\" (UID: \"94030f2b-828a-4de2-9975-688abbf7e3e9\") " pod="openstack/dnsmasq-dns-5c5cc7c5ff-j97p2"
Sep 30 12:36:42 crc kubenswrapper[5002]: I0930 12:36:42.105411 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-tt2dz"]
Sep 30 12:36:42 crc kubenswrapper[5002]: I0930 12:36:42.106625 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-tt2dz"
Sep 30 12:36:42 crc kubenswrapper[5002]: I0930 12:36:42.109940 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts"
Sep 30 12:36:42 crc kubenswrapper[5002]: I0930 12:36:42.110138 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data"
Sep 30 12:36:42 crc kubenswrapper[5002]: I0930 12:36:42.110246 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-n9gzw"
Sep 30 12:36:42 crc kubenswrapper[5002]: I0930 12:36:42.110367 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone"
Sep 30 12:36:42 crc kubenswrapper[5002]: I0930 12:36:42.117162 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-tt2dz"]
Sep 30 12:36:42 crc kubenswrapper[5002]: I0930 12:36:42.170043 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/94030f2b-828a-4de2-9975-688abbf7e3e9-ovsdbserver-nb\") pod \"dnsmasq-dns-5c5cc7c5ff-j97p2\" (UID: \"94030f2b-828a-4de2-9975-688abbf7e3e9\") " pod="openstack/dnsmasq-dns-5c5cc7c5ff-j97p2"
Sep 30 12:36:42 crc kubenswrapper[5002]: I0930 12:36:42.170085 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/94030f2b-828a-4de2-9975-688abbf7e3e9-dns-swift-storage-0\") pod \"dnsmasq-dns-5c5cc7c5ff-j97p2\" (UID: \"94030f2b-828a-4de2-9975-688abbf7e3e9\") " pod="openstack/dnsmasq-dns-5c5cc7c5ff-j97p2"
Sep 30 12:36:42 crc kubenswrapper[5002]: I0930 12:36:42.170135 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/94030f2b-828a-4de2-9975-688abbf7e3e9-dns-svc\") pod \"dnsmasq-dns-5c5cc7c5ff-j97p2\" (UID: \"94030f2b-828a-4de2-9975-688abbf7e3e9\") " pod="openstack/dnsmasq-dns-5c5cc7c5ff-j97p2"
Sep 30 12:36:42 crc kubenswrapper[5002]: I0930 12:36:42.170166 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/94030f2b-828a-4de2-9975-688abbf7e3e9-config\") pod \"dnsmasq-dns-5c5cc7c5ff-j97p2\" (UID: \"94030f2b-828a-4de2-9975-688abbf7e3e9\") " pod="openstack/dnsmasq-dns-5c5cc7c5ff-j97p2"
Sep 30 12:36:42 crc kubenswrapper[5002]: I0930 12:36:42.170199 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/94030f2b-828a-4de2-9975-688abbf7e3e9-ovsdbserver-sb\") pod \"dnsmasq-dns-5c5cc7c5ff-j97p2\" (UID: \"94030f2b-828a-4de2-9975-688abbf7e3e9\") " pod="openstack/dnsmasq-dns-5c5cc7c5ff-j97p2"
Sep 30 12:36:42 crc kubenswrapper[5002]: I0930 12:36:42.170224 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nwdbl\" (UniqueName: \"kubernetes.io/projected/94030f2b-828a-4de2-9975-688abbf7e3e9-kube-api-access-nwdbl\") pod \"dnsmasq-dns-5c5cc7c5ff-j97p2\" (UID: \"94030f2b-828a-4de2-9975-688abbf7e3e9\") " pod="openstack/dnsmasq-dns-5c5cc7c5ff-j97p2"
Sep 30 12:36:42 crc kubenswrapper[5002]: I0930 12:36:42.171551 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/94030f2b-828a-4de2-9975-688abbf7e3e9-ovsdbserver-nb\") pod \"dnsmasq-dns-5c5cc7c5ff-j97p2\" (UID: \"94030f2b-828a-4de2-9975-688abbf7e3e9\") " pod="openstack/dnsmasq-dns-5c5cc7c5ff-j97p2"
Sep 30 12:36:42 crc kubenswrapper[5002]: I0930 12:36:42.172043 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/94030f2b-828a-4de2-9975-688abbf7e3e9-dns-swift-storage-0\") pod \"dnsmasq-dns-5c5cc7c5ff-j97p2\" (UID: \"94030f2b-828a-4de2-9975-688abbf7e3e9\") " pod="openstack/dnsmasq-dns-5c5cc7c5ff-j97p2"
Sep 30 12:36:42 crc kubenswrapper[5002]: I0930 12:36:42.179719 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/94030f2b-828a-4de2-9975-688abbf7e3e9-dns-svc\") pod \"dnsmasq-dns-5c5cc7c5ff-j97p2\" (UID: \"94030f2b-828a-4de2-9975-688abbf7e3e9\") " pod="openstack/dnsmasq-dns-5c5cc7c5ff-j97p2"
Sep 30 12:36:42 crc kubenswrapper[5002]: I0930 12:36:42.186123 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/94030f2b-828a-4de2-9975-688abbf7e3e9-ovsdbserver-sb\") pod \"dnsmasq-dns-5c5cc7c5ff-j97p2\" (UID: \"94030f2b-828a-4de2-9975-688abbf7e3e9\") " pod="openstack/dnsmasq-dns-5c5cc7c5ff-j97p2"
Sep 30 12:36:42 crc kubenswrapper[5002]: I0930 12:36:42.186238 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/94030f2b-828a-4de2-9975-688abbf7e3e9-config\") pod \"dnsmasq-dns-5c5cc7c5ff-j97p2\" (UID: \"94030f2b-828a-4de2-9975-688abbf7e3e9\") " pod="openstack/dnsmasq-dns-5c5cc7c5ff-j97p2"
Sep 30 12:36:42 crc kubenswrapper[5002]: I0930 12:36:42.204391 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-7cf8dd6b55-z9psc"]
Sep 30 12:36:42 crc kubenswrapper[5002]: I0930 12:36:42.226307 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-7cf8dd6b55-z9psc"]
Sep 30 12:36:42 crc kubenswrapper[5002]: I0930 12:36:42.226405 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-7cf8dd6b55-z9psc"
Sep 30 12:36:42 crc kubenswrapper[5002]: I0930 12:36:42.230066 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nwdbl\" (UniqueName: \"kubernetes.io/projected/94030f2b-828a-4de2-9975-688abbf7e3e9-kube-api-access-nwdbl\") pod \"dnsmasq-dns-5c5cc7c5ff-j97p2\" (UID: \"94030f2b-828a-4de2-9975-688abbf7e3e9\") " pod="openstack/dnsmasq-dns-5c5cc7c5ff-j97p2"
Sep 30 12:36:42 crc kubenswrapper[5002]: I0930 12:36:42.230375 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"horizon-config-data"
Sep 30 12:36:42 crc kubenswrapper[5002]: I0930 12:36:42.230656 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"horizon-scripts"
Sep 30 12:36:42 crc kubenswrapper[5002]: I0930 12:36:42.230766 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"horizon-horizon-dockercfg-b2q9d"
Sep 30 12:36:42 crc kubenswrapper[5002]: I0930 12:36:42.230861 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"horizon"
Sep 30 12:36:42 crc kubenswrapper[5002]: I0930 12:36:42.271400 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b8d82577-a47f-40e2-9986-5883fea9ddf5-combined-ca-bundle\") pod \"keystone-bootstrap-tt2dz\" (UID: \"b8d82577-a47f-40e2-9986-5883fea9ddf5\") " pod="openstack/keystone-bootstrap-tt2dz"
Sep 30 12:36:42 crc kubenswrapper[5002]: I0930 12:36:42.271489 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/b8d82577-a47f-40e2-9986-5883fea9ddf5-fernet-keys\") pod \"keystone-bootstrap-tt2dz\" (UID: \"b8d82577-a47f-40e2-9986-5883fea9ddf5\") " pod="openstack/keystone-bootstrap-tt2dz"
Sep 30 12:36:42 crc kubenswrapper[5002]: I0930 12:36:42.271888 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b8d82577-a47f-40e2-9986-5883fea9ddf5-config-data\") pod \"keystone-bootstrap-tt2dz\" (UID: \"b8d82577-a47f-40e2-9986-5883fea9ddf5\") " pod="openstack/keystone-bootstrap-tt2dz"
Sep 30 12:36:42 crc kubenswrapper[5002]: I0930 12:36:42.271933 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mmgms\" (UniqueName: \"kubernetes.io/projected/b8d82577-a47f-40e2-9986-5883fea9ddf5-kube-api-access-mmgms\") pod \"keystone-bootstrap-tt2dz\" (UID: \"b8d82577-a47f-40e2-9986-5883fea9ddf5\") " pod="openstack/keystone-bootstrap-tt2dz"
Sep 30 12:36:42 crc kubenswrapper[5002]: I0930 12:36:42.271973 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b8d82577-a47f-40e2-9986-5883fea9ddf5-scripts\") pod \"keystone-bootstrap-tt2dz\" (UID: \"b8d82577-a47f-40e2-9986-5883fea9ddf5\") " pod="openstack/keystone-bootstrap-tt2dz"
Sep 30 12:36:42 crc kubenswrapper[5002]: I0930 12:36:42.272415 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/b8d82577-a47f-40e2-9986-5883fea9ddf5-credential-keys\") pod \"keystone-bootstrap-tt2dz\" (UID: \"b8d82577-a47f-40e2-9986-5883fea9ddf5\") " pod="openstack/keystone-bootstrap-tt2dz"
Sep 30 12:36:42 crc kubenswrapper[5002]: I0930 12:36:42.323220 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-799d9cbb9f-xj4zl"]
Sep 30 12:36:42 crc kubenswrapper[5002]: I0930 12:36:42.328300 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-799d9cbb9f-xj4zl"
Sep 30 12:36:42 crc kubenswrapper[5002]: I0930 12:36:42.343289 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-799d9cbb9f-xj4zl"]
Sep 30 12:36:42 crc kubenswrapper[5002]: I0930 12:36:42.373688 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/7d2bf63f-4cce-402b-b774-e77dc172959c-scripts\") pod \"horizon-7cf8dd6b55-z9psc\" (UID: \"7d2bf63f-4cce-402b-b774-e77dc172959c\") " pod="openstack/horizon-7cf8dd6b55-z9psc"
Sep 30 12:36:42 crc kubenswrapper[5002]: I0930 12:36:42.373724 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/28380b30-cf01-44f7-8ec6-b1812a5e8435-scripts\") pod \"horizon-799d9cbb9f-xj4zl\" (UID: \"28380b30-cf01-44f7-8ec6-b1812a5e8435\") " pod="openstack/horizon-799d9cbb9f-xj4zl"
Sep 30 12:36:42 crc kubenswrapper[5002]: I0930 12:36:42.373748 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/7d2bf63f-4cce-402b-b774-e77dc172959c-config-data\") pod \"horizon-7cf8dd6b55-z9psc\" (UID: \"7d2bf63f-4cce-402b-b774-e77dc172959c\") " pod="openstack/horizon-7cf8dd6b55-z9psc"
Sep 30 12:36:42 crc kubenswrapper[5002]: I0930 12:36:42.373784 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b8d82577-a47f-40e2-9986-5883fea9ddf5-config-data\") pod \"keystone-bootstrap-tt2dz\" (UID: \"b8d82577-a47f-40e2-9986-5883fea9ddf5\") " pod="openstack/keystone-bootstrap-tt2dz"
Sep 30 12:36:42 crc kubenswrapper[5002]: I0930 12:36:42.373989 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mmgms\" (UniqueName: \"kubernetes.io/projected/b8d82577-a47f-40e2-9986-5883fea9ddf5-kube-api-access-mmgms\") pod \"keystone-bootstrap-tt2dz\" (UID: \"b8d82577-a47f-40e2-9986-5883fea9ddf5\") " pod="openstack/keystone-bootstrap-tt2dz"
Sep 30 12:36:42 crc kubenswrapper[5002]: I0930 12:36:42.374014 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ttjhs\" (UniqueName: \"kubernetes.io/projected/28380b30-cf01-44f7-8ec6-b1812a5e8435-kube-api-access-ttjhs\") pod \"horizon-799d9cbb9f-xj4zl\" (UID: \"28380b30-cf01-44f7-8ec6-b1812a5e8435\") " pod="openstack/horizon-799d9cbb9f-xj4zl"
Sep 30 12:36:42 crc kubenswrapper[5002]: I0930 12:36:42.374036 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b8d82577-a47f-40e2-9986-5883fea9ddf5-scripts\") pod \"keystone-bootstrap-tt2dz\" (UID: \"b8d82577-a47f-40e2-9986-5883fea9ddf5\") " pod="openstack/keystone-bootstrap-tt2dz"
Sep 30 12:36:42 crc kubenswrapper[5002]: I0930 12:36:42.374058 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7d2bf63f-4cce-402b-b774-e77dc172959c-logs\") pod \"horizon-7cf8dd6b55-z9psc\" (UID: \"7d2bf63f-4cce-402b-b774-e77dc172959c\") " pod="openstack/horizon-7cf8dd6b55-z9psc"
Sep 30 12:36:42 crc kubenswrapper[5002]: I0930 12:36:42.374075 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/b8d82577-a47f-40e2-9986-5883fea9ddf5-credential-keys\") pod \"keystone-bootstrap-tt2dz\" (UID: \"b8d82577-a47f-40e2-9986-5883fea9ddf5\") " pod="openstack/keystone-bootstrap-tt2dz"
Sep 30 12:36:42 crc kubenswrapper[5002]: I0930 12:36:42.374093 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/7d2bf63f-4cce-402b-b774-e77dc172959c-horizon-secret-key\") pod \"horizon-7cf8dd6b55-z9psc\" (UID: \"7d2bf63f-4cce-402b-b774-e77dc172959c\") " pod="openstack/horizon-7cf8dd6b55-z9psc"
Sep 30 12:36:42 crc kubenswrapper[5002]: I0930 12:36:42.374112 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b8d82577-a47f-40e2-9986-5883fea9ddf5-combined-ca-bundle\") pod \"keystone-bootstrap-tt2dz\" (UID: \"b8d82577-a47f-40e2-9986-5883fea9ddf5\") " pod="openstack/keystone-bootstrap-tt2dz"
Sep 30 12:36:42 crc kubenswrapper[5002]: I0930 12:36:42.374129 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/28380b30-cf01-44f7-8ec6-b1812a5e8435-logs\") pod \"horizon-799d9cbb9f-xj4zl\" (UID: \"28380b30-cf01-44f7-8ec6-b1812a5e8435\") " pod="openstack/horizon-799d9cbb9f-xj4zl"
Sep 30 12:36:42 crc kubenswrapper[5002]: I0930 12:36:42.374144 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-blm2s\" (UniqueName: \"kubernetes.io/projected/7d2bf63f-4cce-402b-b774-e77dc172959c-kube-api-access-blm2s\") pod \"horizon-7cf8dd6b55-z9psc\" (UID: \"7d2bf63f-4cce-402b-b774-e77dc172959c\") " pod="openstack/horizon-7cf8dd6b55-z9psc"
Sep 30 12:36:42 crc kubenswrapper[5002]: I0930 12:36:42.374166 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/28380b30-cf01-44f7-8ec6-b1812a5e8435-config-data\") pod \"horizon-799d9cbb9f-xj4zl\" (UID: \"28380b30-cf01-44f7-8ec6-b1812a5e8435\") " pod="openstack/horizon-799d9cbb9f-xj4zl"
Sep 30 12:36:42 crc kubenswrapper[5002]: I0930 12:36:42.374186 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/28380b30-cf01-44f7-8ec6-b1812a5e8435-horizon-secret-key\") pod \"horizon-799d9cbb9f-xj4zl\" (UID: \"28380b30-cf01-44f7-8ec6-b1812a5e8435\") " pod="openstack/horizon-799d9cbb9f-xj4zl"
Sep 30 12:36:42 crc kubenswrapper[5002]: I0930 12:36:42.374206 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/b8d82577-a47f-40e2-9986-5883fea9ddf5-fernet-keys\") pod \"keystone-bootstrap-tt2dz\" (UID: \"b8d82577-a47f-40e2-9986-5883fea9ddf5\") " pod="openstack/keystone-bootstrap-tt2dz"
Sep 30 12:36:42 crc kubenswrapper[5002]: I0930 12:36:42.378906 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b8d82577-a47f-40e2-9986-5883fea9ddf5-config-data\") pod \"keystone-bootstrap-tt2dz\" (UID: \"b8d82577-a47f-40e2-9986-5883fea9ddf5\") " pod="openstack/keystone-bootstrap-tt2dz"
Sep 30 12:36:42 crc kubenswrapper[5002]: I0930 12:36:42.382727 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/b8d82577-a47f-40e2-9986-5883fea9ddf5-fernet-keys\") pod \"keystone-bootstrap-tt2dz\" (UID: \"b8d82577-a47f-40e2-9986-5883fea9ddf5\") " pod="openstack/keystone-bootstrap-tt2dz"
Sep 30 12:36:42 crc kubenswrapper[5002]: I0930 12:36:42.383026 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/b8d82577-a47f-40e2-9986-5883fea9ddf5-credential-keys\") pod \"keystone-bootstrap-tt2dz\" (UID: \"b8d82577-a47f-40e2-9986-5883fea9ddf5\") " pod="openstack/keystone-bootstrap-tt2dz"
Sep 30 12:36:42 crc kubenswrapper[5002]: I0930 12:36:42.383041 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b8d82577-a47f-40e2-9986-5883fea9ddf5-combined-ca-bundle\") pod \"keystone-bootstrap-tt2dz\" (UID: \"b8d82577-a47f-40e2-9986-5883fea9ddf5\") " pod="openstack/keystone-bootstrap-tt2dz"
Sep 30 12:36:42 crc kubenswrapper[5002]: I0930 12:36:42.383949 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b8d82577-a47f-40e2-9986-5883fea9ddf5-scripts\") pod \"keystone-bootstrap-tt2dz\" (UID: \"b8d82577-a47f-40e2-9986-5883fea9ddf5\") " pod="openstack/keystone-bootstrap-tt2dz"
Sep 30 12:36:42 crc kubenswrapper[5002]: I0930 12:36:42.387655 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"]
Sep 30 12:36:42 crc kubenswrapper[5002]: I0930 12:36:42.389011 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0"
Sep 30 12:36:42 crc kubenswrapper[5002]: I0930 12:36:42.392547 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-4pcks"
Sep 30 12:36:42 crc kubenswrapper[5002]: I0930 12:36:42.392778 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-scripts"
Sep 30 12:36:42 crc kubenswrapper[5002]: I0930 12:36:42.392989 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data"
Sep 30 12:36:42 crc kubenswrapper[5002]: I0930 12:36:42.402627 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"]
Sep 30 12:36:42 crc kubenswrapper[5002]: I0930 12:36:42.408161 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"]
Sep 30 12:36:42 crc kubenswrapper[5002]: I0930 12:36:42.417773 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Sep 30 12:36:42 crc kubenswrapper[5002]: I0930 12:36:42.431591 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mmgms\" (UniqueName: \"kubernetes.io/projected/b8d82577-a47f-40e2-9986-5883fea9ddf5-kube-api-access-mmgms\") pod \"keystone-bootstrap-tt2dz\" (UID: \"b8d82577-a47f-40e2-9986-5883fea9ddf5\") " pod="openstack/keystone-bootstrap-tt2dz"
Sep 30 12:36:42 crc kubenswrapper[5002]: I0930 12:36:42.440195 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data"
Sep 30 12:36:42 crc kubenswrapper[5002]: I0930 12:36:42.440427 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts"
Sep 30 12:36:42 crc kubenswrapper[5002]: I0930 12:36:42.451718 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5c5cc7c5ff-j97p2"
Sep 30 12:36:42 crc kubenswrapper[5002]: I0930 12:36:42.458073 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"]
Sep 30 12:36:42 crc kubenswrapper[5002]: I0930 12:36:42.480900 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7d2bf63f-4cce-402b-b774-e77dc172959c-logs\") pod \"horizon-7cf8dd6b55-z9psc\" (UID: \"7d2bf63f-4cce-402b-b774-e77dc172959c\") " pod="openstack/horizon-7cf8dd6b55-z9psc"
Sep 30 12:36:42 crc kubenswrapper[5002]: I0930 12:36:42.480943 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/7d2bf63f-4cce-402b-b774-e77dc172959c-horizon-secret-key\") pod \"horizon-7cf8dd6b55-z9psc\" (UID: \"7d2bf63f-4cce-402b-b774-e77dc172959c\") " pod="openstack/horizon-7cf8dd6b55-z9psc"
Sep 30 12:36:42 crc kubenswrapper[5002]: I0930 12:36:42.480973 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/28380b30-cf01-44f7-8ec6-b1812a5e8435-logs\") pod \"horizon-799d9cbb9f-xj4zl\" (UID: \"28380b30-cf01-44f7-8ec6-b1812a5e8435\") " pod="openstack/horizon-799d9cbb9f-xj4zl"
Sep 30 12:36:42 crc kubenswrapper[5002]: I0930 12:36:42.480997 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-blm2s\" (UniqueName: \"kubernetes.io/projected/7d2bf63f-4cce-402b-b774-e77dc172959c-kube-api-access-blm2s\") pod \"horizon-7cf8dd6b55-z9psc\" (UID: \"7d2bf63f-4cce-402b-b774-e77dc172959c\") " pod="openstack/horizon-7cf8dd6b55-z9psc"
Sep 30 12:36:42 crc kubenswrapper[5002]: I0930 12:36:42.481022 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/28380b30-cf01-44f7-8ec6-b1812a5e8435-config-data\") pod \"horizon-799d9cbb9f-xj4zl\" (UID: \"28380b30-cf01-44f7-8ec6-b1812a5e8435\") " pod="openstack/horizon-799d9cbb9f-xj4zl"
Sep 30 12:36:42 crc kubenswrapper[5002]: I0930 12:36:42.481049 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/28380b30-cf01-44f7-8ec6-b1812a5e8435-horizon-secret-key\") pod \"horizon-799d9cbb9f-xj4zl\" (UID: \"28380b30-cf01-44f7-8ec6-b1812a5e8435\") " pod="openstack/horizon-799d9cbb9f-xj4zl"
Sep 30 12:36:42 crc kubenswrapper[5002]: I0930 12:36:42.481106 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/7d2bf63f-4cce-402b-b774-e77dc172959c-scripts\") pod \"horizon-7cf8dd6b55-z9psc\" (UID: \"7d2bf63f-4cce-402b-b774-e77dc172959c\") " pod="openstack/horizon-7cf8dd6b55-z9psc"
Sep 30 12:36:42 crc kubenswrapper[5002]: I0930 12:36:42.481127 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/28380b30-cf01-44f7-8ec6-b1812a5e8435-scripts\") pod \"horizon-799d9cbb9f-xj4zl\" (UID: \"28380b30-cf01-44f7-8ec6-b1812a5e8435\") " pod="openstack/horizon-799d9cbb9f-xj4zl"
Sep 30 12:36:42 crc kubenswrapper[5002]: I0930 12:36:42.481159 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/7d2bf63f-4cce-402b-b774-e77dc172959c-config-data\") pod \"horizon-7cf8dd6b55-z9psc\" (UID: \"7d2bf63f-4cce-402b-b774-e77dc172959c\") " pod="openstack/horizon-7cf8dd6b55-z9psc"
Sep 30 12:36:42 crc kubenswrapper[5002]: I0930 12:36:42.481219 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ttjhs\" (UniqueName: \"kubernetes.io/projected/28380b30-cf01-44f7-8ec6-b1812a5e8435-kube-api-access-ttjhs\") pod \"horizon-799d9cbb9f-xj4zl\" (UID: \"28380b30-cf01-44f7-8ec6-b1812a5e8435\") " pod="openstack/horizon-799d9cbb9f-xj4zl"
Sep 30 12:36:42 crc kubenswrapper[5002]: I0930 12:36:42.481847 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7d2bf63f-4cce-402b-b774-e77dc172959c-logs\") pod \"horizon-7cf8dd6b55-z9psc\" (UID: \"7d2bf63f-4cce-402b-b774-e77dc172959c\") " pod="openstack/horizon-7cf8dd6b55-z9psc"
Sep 30 12:36:42 crc kubenswrapper[5002]: I0930 12:36:42.484594 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/28380b30-cf01-44f7-8ec6-b1812a5e8435-logs\") pod \"horizon-799d9cbb9f-xj4zl\" (UID: \"28380b30-cf01-44f7-8ec6-b1812a5e8435\") " pod="openstack/horizon-799d9cbb9f-xj4zl"
Sep 30 12:36:42 crc kubenswrapper[5002]: I0930 12:36:42.485628 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/28380b30-cf01-44f7-8ec6-b1812a5e8435-scripts\") pod \"horizon-799d9cbb9f-xj4zl\" (UID: \"28380b30-cf01-44f7-8ec6-b1812a5e8435\") " pod="openstack/horizon-799d9cbb9f-xj4zl"
Sep 30 12:36:42 crc kubenswrapper[5002]: I0930 12:36:42.492582 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/7d2bf63f-4cce-402b-b774-e77dc172959c-scripts\") pod \"horizon-7cf8dd6b55-z9psc\" (UID: \"7d2bf63f-4cce-402b-b774-e77dc172959c\") " pod="openstack/horizon-7cf8dd6b55-z9psc"
Sep 30 12:36:42 crc kubenswrapper[5002]: I0930 12:36:42.496175 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/7d2bf63f-4cce-402b-b774-e77dc172959c-config-data\") pod \"horizon-7cf8dd6b55-z9psc\" (UID: \"7d2bf63f-4cce-402b-b774-e77dc172959c\") " pod="openstack/horizon-7cf8dd6b55-z9psc"
Sep 30 12:36:42 crc kubenswrapper[5002]: I0930 12:36:42.496384 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/28380b30-cf01-44f7-8ec6-b1812a5e8435-config-data\") pod \"horizon-799d9cbb9f-xj4zl\" (UID: \"28380b30-cf01-44f7-8ec6-b1812a5e8435\") " pod="openstack/horizon-799d9cbb9f-xj4zl"
Sep 30 12:36:42 crc kubenswrapper[5002]: I0930
12:36:42.526336 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/7d2bf63f-4cce-402b-b774-e77dc172959c-horizon-secret-key\") pod \"horizon-7cf8dd6b55-z9psc\" (UID: \"7d2bf63f-4cce-402b-b774-e77dc172959c\") " pod="openstack/horizon-7cf8dd6b55-z9psc" Sep 30 12:36:42 crc kubenswrapper[5002]: I0930 12:36:42.536905 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ttjhs\" (UniqueName: \"kubernetes.io/projected/28380b30-cf01-44f7-8ec6-b1812a5e8435-kube-api-access-ttjhs\") pod \"horizon-799d9cbb9f-xj4zl\" (UID: \"28380b30-cf01-44f7-8ec6-b1812a5e8435\") " pod="openstack/horizon-799d9cbb9f-xj4zl" Sep 30 12:36:42 crc kubenswrapper[5002]: I0930 12:36:42.542339 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/28380b30-cf01-44f7-8ec6-b1812a5e8435-horizon-secret-key\") pod \"horizon-799d9cbb9f-xj4zl\" (UID: \"28380b30-cf01-44f7-8ec6-b1812a5e8435\") " pod="openstack/horizon-799d9cbb9f-xj4zl" Sep 30 12:36:42 crc kubenswrapper[5002]: I0930 12:36:42.557375 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-tt2dz" Sep 30 12:36:42 crc kubenswrapper[5002]: I0930 12:36:42.561551 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Sep 30 12:36:42 crc kubenswrapper[5002]: I0930 12:36:42.603641 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-blm2s\" (UniqueName: \"kubernetes.io/projected/7d2bf63f-4cce-402b-b774-e77dc172959c-kube-api-access-blm2s\") pod \"horizon-7cf8dd6b55-z9psc\" (UID: \"7d2bf63f-4cce-402b-b774-e77dc172959c\") " pod="openstack/horizon-7cf8dd6b55-z9psc" Sep 30 12:36:42 crc kubenswrapper[5002]: I0930 12:36:42.619206 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"glance-default-internal-api-0\" (UID: \"14d6a446-4f72-417e-8bdd-af837eda2e70\") " pod="openstack/glance-default-internal-api-0" Sep 30 12:36:42 crc kubenswrapper[5002]: I0930 12:36:42.619321 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/22178f02-1a64-4a88-a564-fe143875e7df-log-httpd\") pod \"ceilometer-0\" (UID: \"22178f02-1a64-4a88-a564-fe143875e7df\") " pod="openstack/ceilometer-0" Sep 30 12:36:42 crc kubenswrapper[5002]: I0930 12:36:42.619361 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/22178f02-1a64-4a88-a564-fe143875e7df-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"22178f02-1a64-4a88-a564-fe143875e7df\") " pod="openstack/ceilometer-0" Sep 30 12:36:42 crc kubenswrapper[5002]: I0930 12:36:42.619402 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/22178f02-1a64-4a88-a564-fe143875e7df-run-httpd\") pod \"ceilometer-0\" (UID: \"22178f02-1a64-4a88-a564-fe143875e7df\") " pod="openstack/ceilometer-0" Sep 30 12:36:42 crc kubenswrapper[5002]: I0930 12:36:42.619427 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/14d6a446-4f72-417e-8bdd-af837eda2e70-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"14d6a446-4f72-417e-8bdd-af837eda2e70\") " pod="openstack/glance-default-internal-api-0" Sep 30 12:36:42 crc kubenswrapper[5002]: I0930 12:36:42.619511 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/14d6a446-4f72-417e-8bdd-af837eda2e70-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"14d6a446-4f72-417e-8bdd-af837eda2e70\") " pod="openstack/glance-default-internal-api-0" Sep 30 12:36:42 crc kubenswrapper[5002]: I0930 12:36:42.619571 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/14d6a446-4f72-417e-8bdd-af837eda2e70-scripts\") pod \"glance-default-internal-api-0\" (UID: \"14d6a446-4f72-417e-8bdd-af837eda2e70\") " pod="openstack/glance-default-internal-api-0" Sep 30 12:36:42 crc kubenswrapper[5002]: I0930 12:36:42.619608 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/14d6a446-4f72-417e-8bdd-af837eda2e70-logs\") pod \"glance-default-internal-api-0\" (UID: \"14d6a446-4f72-417e-8bdd-af837eda2e70\") " pod="openstack/glance-default-internal-api-0" Sep 30 12:36:42 crc kubenswrapper[5002]: I0930 12:36:42.619661 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bc7kt\" (UniqueName: \"kubernetes.io/projected/14d6a446-4f72-417e-8bdd-af837eda2e70-kube-api-access-bc7kt\") pod \"glance-default-internal-api-0\" (UID: \"14d6a446-4f72-417e-8bdd-af837eda2e70\") " pod="openstack/glance-default-internal-api-0" Sep 30 12:36:42 crc kubenswrapper[5002]: I0930 12:36:42.622662 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5c5cc7c5ff-j97p2"] Sep 30 12:36:42 crc kubenswrapper[5002]: I0930 12:36:42.622764 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Sep 30 12:36:42 crc kubenswrapper[5002]: I0930 12:36:42.625337 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/14d6a446-4f72-417e-8bdd-af837eda2e70-config-data\") pod \"glance-default-internal-api-0\" (UID: \"14d6a446-4f72-417e-8bdd-af837eda2e70\") " pod="openstack/glance-default-internal-api-0" Sep 30 12:36:42 crc kubenswrapper[5002]: I0930 12:36:42.625387 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/22178f02-1a64-4a88-a564-fe143875e7df-scripts\") pod \"ceilometer-0\" (UID: \"22178f02-1a64-4a88-a564-fe143875e7df\") " pod="openstack/ceilometer-0" Sep 30 12:36:42 crc kubenswrapper[5002]: I0930 12:36:42.625409 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v27pl\" (UniqueName: \"kubernetes.io/projected/22178f02-1a64-4a88-a564-fe143875e7df-kube-api-access-v27pl\") pod \"ceilometer-0\" (UID: \"22178f02-1a64-4a88-a564-fe143875e7df\") " pod="openstack/ceilometer-0" Sep 30 12:36:42 crc kubenswrapper[5002]: I0930 12:36:42.625588 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/22178f02-1a64-4a88-a564-fe143875e7df-config-data\") pod \"ceilometer-0\" (UID: \"22178f02-1a64-4a88-a564-fe143875e7df\") " pod="openstack/ceilometer-0" Sep 30 12:36:42 crc kubenswrapper[5002]: I0930 12:36:42.625631 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/22178f02-1a64-4a88-a564-fe143875e7df-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"22178f02-1a64-4a88-a564-fe143875e7df\") " pod="openstack/ceilometer-0" Sep 30 12:36:42 crc kubenswrapper[5002]: I0930 12:36:42.626483 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Sep 30 12:36:42 crc kubenswrapper[5002]: I0930 12:36:42.651230 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-799d9cbb9f-xj4zl" Sep 30 12:36:42 crc kubenswrapper[5002]: I0930 12:36:42.660573 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Sep 30 12:36:42 crc kubenswrapper[5002]: I0930 12:36:42.673321 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-sync-bnf58"] Sep 30 12:36:42 crc kubenswrapper[5002]: I0930 12:36:42.684799 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-bnf58"] Sep 30 12:36:42 crc kubenswrapper[5002]: I0930 12:36:42.685100 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-sync-bnf58" Sep 30 12:36:42 crc kubenswrapper[5002]: I0930 12:36:42.697782 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts" Sep 30 12:36:42 crc kubenswrapper[5002]: I0930 12:36:42.698054 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-znl4p" Sep 30 12:36:42 crc kubenswrapper[5002]: I0930 12:36:42.698189 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data" Sep 30 12:36:42 crc kubenswrapper[5002]: I0930 12:36:42.713976 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-8b5c85b87-frwpd"] Sep 30 12:36:42 crc kubenswrapper[5002]: I0930 12:36:42.717382 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-8b5c85b87-frwpd" Sep 30 12:36:42 crc kubenswrapper[5002]: I0930 12:36:42.719630 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-8b5c85b87-frwpd"] Sep 30 12:36:42 crc kubenswrapper[5002]: I0930 12:36:42.726821 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/22178f02-1a64-4a88-a564-fe143875e7df-config-data\") pod \"ceilometer-0\" (UID: \"22178f02-1a64-4a88-a564-fe143875e7df\") " pod="openstack/ceilometer-0" Sep 30 12:36:42 crc kubenswrapper[5002]: I0930 12:36:42.726864 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/22178f02-1a64-4a88-a564-fe143875e7df-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"22178f02-1a64-4a88-a564-fe143875e7df\") " pod="openstack/ceilometer-0" Sep 30 12:36:42 crc kubenswrapper[5002]: I0930 12:36:42.726891 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"glance-default-internal-api-0\" (UID: \"14d6a446-4f72-417e-8bdd-af837eda2e70\") " pod="openstack/glance-default-internal-api-0" Sep 30 12:36:42 crc kubenswrapper[5002]: I0930 12:36:42.726936 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/22178f02-1a64-4a88-a564-fe143875e7df-log-httpd\") pod \"ceilometer-0\" (UID: \"22178f02-1a64-4a88-a564-fe143875e7df\") " pod="openstack/ceilometer-0" Sep 30 12:36:42 crc kubenswrapper[5002]: I0930 12:36:42.726960 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/22178f02-1a64-4a88-a564-fe143875e7df-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"22178f02-1a64-4a88-a564-fe143875e7df\") " pod="openstack/ceilometer-0" Sep 30 12:36:42 crc kubenswrapper[5002]: I0930 12:36:42.726989 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/22178f02-1a64-4a88-a564-fe143875e7df-run-httpd\") pod \"ceilometer-0\" (UID: \"22178f02-1a64-4a88-a564-fe143875e7df\") " pod="openstack/ceilometer-0" Sep 30 12:36:42 crc kubenswrapper[5002]: I0930 12:36:42.727009 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/14d6a446-4f72-417e-8bdd-af837eda2e70-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: 
\"14d6a446-4f72-417e-8bdd-af837eda2e70\") " pod="openstack/glance-default-internal-api-0" Sep 30 12:36:42 crc kubenswrapper[5002]: I0930 12:36:42.727046 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/14d6a446-4f72-417e-8bdd-af837eda2e70-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"14d6a446-4f72-417e-8bdd-af837eda2e70\") " pod="openstack/glance-default-internal-api-0" Sep 30 12:36:42 crc kubenswrapper[5002]: I0930 12:36:42.727079 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/14d6a446-4f72-417e-8bdd-af837eda2e70-scripts\") pod \"glance-default-internal-api-0\" (UID: \"14d6a446-4f72-417e-8bdd-af837eda2e70\") " pod="openstack/glance-default-internal-api-0" Sep 30 12:36:42 crc kubenswrapper[5002]: I0930 12:36:42.727103 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/14d6a446-4f72-417e-8bdd-af837eda2e70-logs\") pod \"glance-default-internal-api-0\" (UID: \"14d6a446-4f72-417e-8bdd-af837eda2e70\") " pod="openstack/glance-default-internal-api-0" Sep 30 12:36:42 crc kubenswrapper[5002]: I0930 12:36:42.727134 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bc7kt\" (UniqueName: \"kubernetes.io/projected/14d6a446-4f72-417e-8bdd-af837eda2e70-kube-api-access-bc7kt\") pod \"glance-default-internal-api-0\" (UID: \"14d6a446-4f72-417e-8bdd-af837eda2e70\") " pod="openstack/glance-default-internal-api-0" Sep 30 12:36:42 crc kubenswrapper[5002]: I0930 12:36:42.727162 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/14d6a446-4f72-417e-8bdd-af837eda2e70-config-data\") pod \"glance-default-internal-api-0\" (UID: \"14d6a446-4f72-417e-8bdd-af837eda2e70\") " pod="openstack/glance-default-internal-api-0" Sep 30 12:36:42 crc kubenswrapper[5002]: I0930 12:36:42.727181 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/22178f02-1a64-4a88-a564-fe143875e7df-scripts\") pod \"ceilometer-0\" (UID: \"22178f02-1a64-4a88-a564-fe143875e7df\") " pod="openstack/ceilometer-0" Sep 30 12:36:42 crc kubenswrapper[5002]: I0930 12:36:42.727202 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v27pl\" (UniqueName: \"kubernetes.io/projected/22178f02-1a64-4a88-a564-fe143875e7df-kube-api-access-v27pl\") pod \"ceilometer-0\" (UID: \"22178f02-1a64-4a88-a564-fe143875e7df\") " pod="openstack/ceilometer-0" Sep 30 12:36:42 crc kubenswrapper[5002]: I0930 12:36:42.728321 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/14d6a446-4f72-417e-8bdd-af837eda2e70-logs\") pod \"glance-default-internal-api-0\" (UID: \"14d6a446-4f72-417e-8bdd-af837eda2e70\") " pod="openstack/glance-default-internal-api-0" Sep 30 12:36:42 crc kubenswrapper[5002]: I0930 12:36:42.728642 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/14d6a446-4f72-417e-8bdd-af837eda2e70-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"14d6a446-4f72-417e-8bdd-af837eda2e70\") " pod="openstack/glance-default-internal-api-0" Sep 30 12:36:42 crc kubenswrapper[5002]: I0930 12:36:42.732232 5002 operation_generator.go:580] 
"MountVolume.MountDevice succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"glance-default-internal-api-0\" (UID: \"14d6a446-4f72-417e-8bdd-af837eda2e70\") device mount path \"/mnt/openstack/pv06\"" pod="openstack/glance-default-internal-api-0" Sep 30 12:36:42 crc kubenswrapper[5002]: I0930 12:36:42.732983 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/22178f02-1a64-4a88-a564-fe143875e7df-log-httpd\") pod \"ceilometer-0\" (UID: \"22178f02-1a64-4a88-a564-fe143875e7df\") " pod="openstack/ceilometer-0" Sep 30 12:36:42 crc kubenswrapper[5002]: I0930 12:36:42.733106 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/22178f02-1a64-4a88-a564-fe143875e7df-run-httpd\") pod \"ceilometer-0\" (UID: \"22178f02-1a64-4a88-a564-fe143875e7df\") " pod="openstack/ceilometer-0" Sep 30 12:36:42 crc kubenswrapper[5002]: I0930 12:36:42.747999 5002 generic.go:334] "Generic (PLEG): container finished" podID="137d5592-5ce3-4aa8-924c-d287bcd87d4c" containerID="81e16a3e62248c1d3069baaf8390029d6b8a5f28380fe3ccf9d7b36c652564e6" exitCode=0 Sep 30 12:36:42 crc kubenswrapper[5002]: I0930 12:36:42.751636 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7ff5475cc9-g4bpt" event={"ID":"137d5592-5ce3-4aa8-924c-d287bcd87d4c","Type":"ContainerDied","Data":"81e16a3e62248c1d3069baaf8390029d6b8a5f28380fe3ccf9d7b36c652564e6"} Sep 30 12:36:42 crc kubenswrapper[5002]: I0930 12:36:42.779713 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7ff5475cc9-g4bpt" event={"ID":"137d5592-5ce3-4aa8-924c-d287bcd87d4c","Type":"ContainerStarted","Data":"dd5d8a509e680e3a8933eb36a5e2cc80ef58a1c308fa99534abb12cb58b4a130"} Sep 30 12:36:42 crc kubenswrapper[5002]: I0930 12:36:42.778413 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/14d6a446-4f72-417e-8bdd-af837eda2e70-config-data\") pod \"glance-default-internal-api-0\" (UID: \"14d6a446-4f72-417e-8bdd-af837eda2e70\") " pod="openstack/glance-default-internal-api-0" Sep 30 12:36:42 crc kubenswrapper[5002]: I0930 12:36:42.779106 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/14d6a446-4f72-417e-8bdd-af837eda2e70-scripts\") pod \"glance-default-internal-api-0\" (UID: \"14d6a446-4f72-417e-8bdd-af837eda2e70\") " pod="openstack/glance-default-internal-api-0" Sep 30 12:36:42 crc kubenswrapper[5002]: I0930 12:36:42.771827 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/22178f02-1a64-4a88-a564-fe143875e7df-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"22178f02-1a64-4a88-a564-fe143875e7df\") " pod="openstack/ceilometer-0" Sep 30 12:36:42 crc kubenswrapper[5002]: I0930 12:36:42.790620 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/14d6a446-4f72-417e-8bdd-af837eda2e70-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"14d6a446-4f72-417e-8bdd-af837eda2e70\") " pod="openstack/glance-default-internal-api-0" Sep 30 12:36:42 crc kubenswrapper[5002]: I0930 12:36:42.812853 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/22178f02-1a64-4a88-a564-fe143875e7df-scripts\") pod \"ceilometer-0\" (UID: \"22178f02-1a64-4a88-a564-fe143875e7df\") " pod="openstack/ceilometer-0" Sep 30 12:36:42 crc kubenswrapper[5002]: I0930 12:36:42.813154 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/22178f02-1a64-4a88-a564-fe143875e7df-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"22178f02-1a64-4a88-a564-fe143875e7df\") " pod="openstack/ceilometer-0" Sep 30 12:36:42 crc kubenswrapper[5002]: I0930 12:36:42.813733 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v27pl\" (UniqueName: \"kubernetes.io/projected/22178f02-1a64-4a88-a564-fe143875e7df-kube-api-access-v27pl\") pod \"ceilometer-0\" (UID: \"22178f02-1a64-4a88-a564-fe143875e7df\") " pod="openstack/ceilometer-0" Sep 30 12:36:42 crc kubenswrapper[5002]: I0930 12:36:42.814319 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/22178f02-1a64-4a88-a564-fe143875e7df-config-data\") pod \"ceilometer-0\" (UID: \"22178f02-1a64-4a88-a564-fe143875e7df\") " pod="openstack/ceilometer-0" Sep 30 12:36:42 crc kubenswrapper[5002]: I0930 12:36:42.829220 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cc38060e-b831-4300-9f91-e62f1075da79-config-data\") pod \"glance-default-external-api-0\" (UID: \"cc38060e-b831-4300-9f91-e62f1075da79\") " pod="openstack/glance-default-external-api-0" Sep 30 12:36:42 crc kubenswrapper[5002]: I0930 12:36:42.829262 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/e6c79eff-4c2e-42b9-af82-df66c7091400-dns-swift-storage-0\") pod \"dnsmasq-dns-8b5c85b87-frwpd\" (UID: \"e6c79eff-4c2e-42b9-af82-df66c7091400\") " pod="openstack/dnsmasq-dns-8b5c85b87-frwpd" Sep 30 12:36:42 crc kubenswrapper[5002]: I0930 12:36:42.829288 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e6c79eff-4c2e-42b9-af82-df66c7091400-ovsdbserver-sb\") pod \"dnsmasq-dns-8b5c85b87-frwpd\" (UID: \"e6c79eff-4c2e-42b9-af82-df66c7091400\") " pod="openstack/dnsmasq-dns-8b5c85b87-frwpd" Sep 30 12:36:42 crc kubenswrapper[5002]: I0930 12:36:42.829335 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-external-api-0\" (UID: \"cc38060e-b831-4300-9f91-e62f1075da79\") " pod="openstack/glance-default-external-api-0" Sep 30 12:36:42 crc kubenswrapper[5002]: I0930 12:36:42.829355 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cc38060e-b831-4300-9f91-e62f1075da79-scripts\") pod \"glance-default-external-api-0\" (UID: \"cc38060e-b831-4300-9f91-e62f1075da79\") " pod="openstack/glance-default-external-api-0" Sep 30 12:36:42 crc kubenswrapper[5002]: I0930 12:36:42.829376 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4fdf0c77-68ae-41ff-b6b5-122baa461b8c-combined-ca-bundle\") pod \"placement-db-sync-bnf58\" (UID: 
\"4fdf0c77-68ae-41ff-b6b5-122baa461b8c\") " pod="openstack/placement-db-sync-bnf58" Sep 30 12:36:42 crc kubenswrapper[5002]: I0930 12:36:42.829423 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w98jx\" (UniqueName: \"kubernetes.io/projected/cc38060e-b831-4300-9f91-e62f1075da79-kube-api-access-w98jx\") pod \"glance-default-external-api-0\" (UID: \"cc38060e-b831-4300-9f91-e62f1075da79\") " pod="openstack/glance-default-external-api-0" Sep 30 12:36:42 crc kubenswrapper[5002]: I0930 12:36:42.829446 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cc38060e-b831-4300-9f91-e62f1075da79-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"cc38060e-b831-4300-9f91-e62f1075da79\") " pod="openstack/glance-default-external-api-0" Sep 30 12:36:42 crc kubenswrapper[5002]: I0930 12:36:42.829524 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4fdf0c77-68ae-41ff-b6b5-122baa461b8c-scripts\") pod \"placement-db-sync-bnf58\" (UID: \"4fdf0c77-68ae-41ff-b6b5-122baa461b8c\") " pod="openstack/placement-db-sync-bnf58" Sep 30 12:36:42 crc kubenswrapper[5002]: I0930 12:36:42.841997 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bc7kt\" (UniqueName: \"kubernetes.io/projected/14d6a446-4f72-417e-8bdd-af837eda2e70-kube-api-access-bc7kt\") pod \"glance-default-internal-api-0\" (UID: \"14d6a446-4f72-417e-8bdd-af837eda2e70\") " pod="openstack/glance-default-internal-api-0" Sep 30 12:36:42 crc kubenswrapper[5002]: I0930 12:36:42.855176 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zxgtw\" (UniqueName: \"kubernetes.io/projected/e6c79eff-4c2e-42b9-af82-df66c7091400-kube-api-access-zxgtw\") pod \"dnsmasq-dns-8b5c85b87-frwpd\" (UID: \"e6c79eff-4c2e-42b9-af82-df66c7091400\") " pod="openstack/dnsmasq-dns-8b5c85b87-frwpd" Sep 30 12:36:42 crc kubenswrapper[5002]: I0930 12:36:42.855261 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e6c79eff-4c2e-42b9-af82-df66c7091400-config\") pod \"dnsmasq-dns-8b5c85b87-frwpd\" (UID: \"e6c79eff-4c2e-42b9-af82-df66c7091400\") " pod="openstack/dnsmasq-dns-8b5c85b87-frwpd" Sep 30 12:36:42 crc kubenswrapper[5002]: I0930 12:36:42.855307 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cc38060e-b831-4300-9f91-e62f1075da79-logs\") pod \"glance-default-external-api-0\" (UID: \"cc38060e-b831-4300-9f91-e62f1075da79\") " pod="openstack/glance-default-external-api-0" Sep 30 12:36:42 crc kubenswrapper[5002]: I0930 12:36:42.855363 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9pmcw\" (UniqueName: \"kubernetes.io/projected/4fdf0c77-68ae-41ff-b6b5-122baa461b8c-kube-api-access-9pmcw\") pod \"placement-db-sync-bnf58\" (UID: \"4fdf0c77-68ae-41ff-b6b5-122baa461b8c\") " pod="openstack/placement-db-sync-bnf58" Sep 30 12:36:42 crc kubenswrapper[5002]: I0930 12:36:42.855408 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: 
\"kubernetes.io/empty-dir/4fdf0c77-68ae-41ff-b6b5-122baa461b8c-logs\") pod \"placement-db-sync-bnf58\" (UID: \"4fdf0c77-68ae-41ff-b6b5-122baa461b8c\") " pod="openstack/placement-db-sync-bnf58" Sep 30 12:36:42 crc kubenswrapper[5002]: I0930 12:36:42.855508 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e6c79eff-4c2e-42b9-af82-df66c7091400-dns-svc\") pod \"dnsmasq-dns-8b5c85b87-frwpd\" (UID: \"e6c79eff-4c2e-42b9-af82-df66c7091400\") " pod="openstack/dnsmasq-dns-8b5c85b87-frwpd" Sep 30 12:36:42 crc kubenswrapper[5002]: I0930 12:36:42.855539 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4fdf0c77-68ae-41ff-b6b5-122baa461b8c-config-data\") pod \"placement-db-sync-bnf58\" (UID: \"4fdf0c77-68ae-41ff-b6b5-122baa461b8c\") " pod="openstack/placement-db-sync-bnf58" Sep 30 12:36:42 crc kubenswrapper[5002]: I0930 12:36:42.855588 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e6c79eff-4c2e-42b9-af82-df66c7091400-ovsdbserver-nb\") pod \"dnsmasq-dns-8b5c85b87-frwpd\" (UID: \"e6c79eff-4c2e-42b9-af82-df66c7091400\") " pod="openstack/dnsmasq-dns-8b5c85b87-frwpd" Sep 30 12:36:42 crc kubenswrapper[5002]: I0930 12:36:42.855612 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/cc38060e-b831-4300-9f91-e62f1075da79-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"cc38060e-b831-4300-9f91-e62f1075da79\") " pod="openstack/glance-default-external-api-0" Sep 30 12:36:42 crc kubenswrapper[5002]: I0930 12:36:42.870874 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-7cf8dd6b55-z9psc" Sep 30 12:36:42 crc kubenswrapper[5002]: I0930 12:36:42.957706 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cc38060e-b831-4300-9f91-e62f1075da79-config-data\") pod \"glance-default-external-api-0\" (UID: \"cc38060e-b831-4300-9f91-e62f1075da79\") " pod="openstack/glance-default-external-api-0" Sep 30 12:36:42 crc kubenswrapper[5002]: I0930 12:36:42.957750 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/e6c79eff-4c2e-42b9-af82-df66c7091400-dns-swift-storage-0\") pod \"dnsmasq-dns-8b5c85b87-frwpd\" (UID: \"e6c79eff-4c2e-42b9-af82-df66c7091400\") " pod="openstack/dnsmasq-dns-8b5c85b87-frwpd" Sep 30 12:36:42 crc kubenswrapper[5002]: I0930 12:36:42.957767 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e6c79eff-4c2e-42b9-af82-df66c7091400-ovsdbserver-sb\") pod \"dnsmasq-dns-8b5c85b87-frwpd\" (UID: \"e6c79eff-4c2e-42b9-af82-df66c7091400\") " pod="openstack/dnsmasq-dns-8b5c85b87-frwpd" Sep 30 12:36:42 crc kubenswrapper[5002]: I0930 12:36:42.957804 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-external-api-0\" (UID: \"cc38060e-b831-4300-9f91-e62f1075da79\") " pod="openstack/glance-default-external-api-0" Sep 30 12:36:42 crc kubenswrapper[5002]: I0930 12:36:42.957820 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cc38060e-b831-4300-9f91-e62f1075da79-scripts\") pod \"glance-default-external-api-0\" (UID: \"cc38060e-b831-4300-9f91-e62f1075da79\") " pod="openstack/glance-default-external-api-0" Sep 30 12:36:42 crc kubenswrapper[5002]: I0930 12:36:42.957838 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4fdf0c77-68ae-41ff-b6b5-122baa461b8c-combined-ca-bundle\") pod \"placement-db-sync-bnf58\" (UID: \"4fdf0c77-68ae-41ff-b6b5-122baa461b8c\") " pod="openstack/placement-db-sync-bnf58" Sep 30 12:36:42 crc kubenswrapper[5002]: I0930 12:36:42.957862 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w98jx\" (UniqueName: \"kubernetes.io/projected/cc38060e-b831-4300-9f91-e62f1075da79-kube-api-access-w98jx\") pod \"glance-default-external-api-0\" (UID: \"cc38060e-b831-4300-9f91-e62f1075da79\") " pod="openstack/glance-default-external-api-0" Sep 30 12:36:42 crc kubenswrapper[5002]: I0930 12:36:42.957877 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cc38060e-b831-4300-9f91-e62f1075da79-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"cc38060e-b831-4300-9f91-e62f1075da79\") " pod="openstack/glance-default-external-api-0" Sep 30 12:36:42 crc kubenswrapper[5002]: I0930 12:36:42.957920 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4fdf0c77-68ae-41ff-b6b5-122baa461b8c-scripts\") pod \"placement-db-sync-bnf58\" (UID: \"4fdf0c77-68ae-41ff-b6b5-122baa461b8c\") " pod="openstack/placement-db-sync-bnf58" Sep 30 12:36:42 crc 
kubenswrapper[5002]: I0930 12:36:42.957942 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zxgtw\" (UniqueName: \"kubernetes.io/projected/e6c79eff-4c2e-42b9-af82-df66c7091400-kube-api-access-zxgtw\") pod \"dnsmasq-dns-8b5c85b87-frwpd\" (UID: \"e6c79eff-4c2e-42b9-af82-df66c7091400\") " pod="openstack/dnsmasq-dns-8b5c85b87-frwpd" Sep 30 12:36:42 crc kubenswrapper[5002]: I0930 12:36:42.957964 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e6c79eff-4c2e-42b9-af82-df66c7091400-config\") pod \"dnsmasq-dns-8b5c85b87-frwpd\" (UID: \"e6c79eff-4c2e-42b9-af82-df66c7091400\") " pod="openstack/dnsmasq-dns-8b5c85b87-frwpd" Sep 30 12:36:42 crc kubenswrapper[5002]: I0930 12:36:42.957985 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cc38060e-b831-4300-9f91-e62f1075da79-logs\") pod \"glance-default-external-api-0\" (UID: \"cc38060e-b831-4300-9f91-e62f1075da79\") " pod="openstack/glance-default-external-api-0" Sep 30 12:36:42 crc kubenswrapper[5002]: I0930 12:36:42.958013 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9pmcw\" (UniqueName: \"kubernetes.io/projected/4fdf0c77-68ae-41ff-b6b5-122baa461b8c-kube-api-access-9pmcw\") pod \"placement-db-sync-bnf58\" (UID: \"4fdf0c77-68ae-41ff-b6b5-122baa461b8c\") " pod="openstack/placement-db-sync-bnf58" Sep 30 12:36:42 crc kubenswrapper[5002]: I0930 12:36:42.958046 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4fdf0c77-68ae-41ff-b6b5-122baa461b8c-logs\") pod \"placement-db-sync-bnf58\" (UID: \"4fdf0c77-68ae-41ff-b6b5-122baa461b8c\") " pod="openstack/placement-db-sync-bnf58" Sep 30 12:36:42 crc kubenswrapper[5002]: I0930 12:36:42.958072 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e6c79eff-4c2e-42b9-af82-df66c7091400-dns-svc\") pod \"dnsmasq-dns-8b5c85b87-frwpd\" (UID: \"e6c79eff-4c2e-42b9-af82-df66c7091400\") " pod="openstack/dnsmasq-dns-8b5c85b87-frwpd" Sep 30 12:36:42 crc kubenswrapper[5002]: I0930 12:36:42.958091 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4fdf0c77-68ae-41ff-b6b5-122baa461b8c-config-data\") pod \"placement-db-sync-bnf58\" (UID: \"4fdf0c77-68ae-41ff-b6b5-122baa461b8c\") " pod="openstack/placement-db-sync-bnf58" Sep 30 12:36:42 crc kubenswrapper[5002]: I0930 12:36:42.958117 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e6c79eff-4c2e-42b9-af82-df66c7091400-ovsdbserver-nb\") pod \"dnsmasq-dns-8b5c85b87-frwpd\" (UID: \"e6c79eff-4c2e-42b9-af82-df66c7091400\") " pod="openstack/dnsmasq-dns-8b5c85b87-frwpd" Sep 30 12:36:42 crc kubenswrapper[5002]: I0930 12:36:42.958134 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/cc38060e-b831-4300-9f91-e62f1075da79-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"cc38060e-b831-4300-9f91-e62f1075da79\") " pod="openstack/glance-default-external-api-0" Sep 30 12:36:42 crc kubenswrapper[5002]: I0930 12:36:42.958664 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: 
\"kubernetes.io/empty-dir/cc38060e-b831-4300-9f91-e62f1075da79-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"cc38060e-b831-4300-9f91-e62f1075da79\") " pod="openstack/glance-default-external-api-0" Sep 30 12:36:42 crc kubenswrapper[5002]: I0930 12:36:42.959159 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"glance-default-internal-api-0\" (UID: \"14d6a446-4f72-417e-8bdd-af837eda2e70\") " pod="openstack/glance-default-internal-api-0" Sep 30 12:36:42 crc kubenswrapper[5002]: I0930 12:36:42.960076 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e6c79eff-4c2e-42b9-af82-df66c7091400-dns-svc\") pod \"dnsmasq-dns-8b5c85b87-frwpd\" (UID: \"e6c79eff-4c2e-42b9-af82-df66c7091400\") " pod="openstack/dnsmasq-dns-8b5c85b87-frwpd" Sep 30 12:36:42 crc kubenswrapper[5002]: I0930 12:36:42.960143 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e6c79eff-4c2e-42b9-af82-df66c7091400-config\") pod \"dnsmasq-dns-8b5c85b87-frwpd\" (UID: \"e6c79eff-4c2e-42b9-af82-df66c7091400\") " pod="openstack/dnsmasq-dns-8b5c85b87-frwpd" Sep 30 12:36:42 crc kubenswrapper[5002]: I0930 12:36:42.960154 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4fdf0c77-68ae-41ff-b6b5-122baa461b8c-logs\") pod \"placement-db-sync-bnf58\" (UID: \"4fdf0c77-68ae-41ff-b6b5-122baa461b8c\") " pod="openstack/placement-db-sync-bnf58" Sep 30 12:36:42 crc kubenswrapper[5002]: I0930 12:36:42.960334 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cc38060e-b831-4300-9f91-e62f1075da79-logs\") pod \"glance-default-external-api-0\" (UID: \"cc38060e-b831-4300-9f91-e62f1075da79\") " pod="openstack/glance-default-external-api-0" Sep 30 12:36:42 crc kubenswrapper[5002]: I0930 12:36:42.960622 5002 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-external-api-0\" (UID: \"cc38060e-b831-4300-9f91-e62f1075da79\") device mount path \"/mnt/openstack/pv07\"" pod="openstack/glance-default-external-api-0" Sep 30 12:36:42 crc kubenswrapper[5002]: I0930 12:36:42.960903 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e6c79eff-4c2e-42b9-af82-df66c7091400-ovsdbserver-sb\") pod \"dnsmasq-dns-8b5c85b87-frwpd\" (UID: \"e6c79eff-4c2e-42b9-af82-df66c7091400\") " pod="openstack/dnsmasq-dns-8b5c85b87-frwpd" Sep 30 12:36:42 crc kubenswrapper[5002]: I0930 12:36:42.961113 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/e6c79eff-4c2e-42b9-af82-df66c7091400-dns-swift-storage-0\") pod \"dnsmasq-dns-8b5c85b87-frwpd\" (UID: \"e6c79eff-4c2e-42b9-af82-df66c7091400\") " pod="openstack/dnsmasq-dns-8b5c85b87-frwpd" Sep 30 12:36:42 crc kubenswrapper[5002]: I0930 12:36:42.963851 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e6c79eff-4c2e-42b9-af82-df66c7091400-ovsdbserver-nb\") pod \"dnsmasq-dns-8b5c85b87-frwpd\" (UID: \"e6c79eff-4c2e-42b9-af82-df66c7091400\") " 
pod="openstack/dnsmasq-dns-8b5c85b87-frwpd" Sep 30 12:36:42 crc kubenswrapper[5002]: I0930 12:36:42.975679 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4fdf0c77-68ae-41ff-b6b5-122baa461b8c-scripts\") pod \"placement-db-sync-bnf58\" (UID: \"4fdf0c77-68ae-41ff-b6b5-122baa461b8c\") " pod="openstack/placement-db-sync-bnf58" Sep 30 12:36:42 crc kubenswrapper[5002]: I0930 12:36:42.976360 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cc38060e-b831-4300-9f91-e62f1075da79-config-data\") pod \"glance-default-external-api-0\" (UID: \"cc38060e-b831-4300-9f91-e62f1075da79\") " pod="openstack/glance-default-external-api-0" Sep 30 12:36:42 crc kubenswrapper[5002]: I0930 12:36:42.977884 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cc38060e-b831-4300-9f91-e62f1075da79-scripts\") pod \"glance-default-external-api-0\" (UID: \"cc38060e-b831-4300-9f91-e62f1075da79\") " pod="openstack/glance-default-external-api-0" Sep 30 12:36:42 crc kubenswrapper[5002]: I0930 12:36:42.978175 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4fdf0c77-68ae-41ff-b6b5-122baa461b8c-config-data\") pod \"placement-db-sync-bnf58\" (UID: \"4fdf0c77-68ae-41ff-b6b5-122baa461b8c\") " pod="openstack/placement-db-sync-bnf58" Sep 30 12:36:42 crc kubenswrapper[5002]: I0930 12:36:42.978826 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4fdf0c77-68ae-41ff-b6b5-122baa461b8c-combined-ca-bundle\") pod \"placement-db-sync-bnf58\" (UID: \"4fdf0c77-68ae-41ff-b6b5-122baa461b8c\") " pod="openstack/placement-db-sync-bnf58" Sep 30 12:36:42 crc kubenswrapper[5002]: I0930 12:36:42.980711 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zxgtw\" (UniqueName: \"kubernetes.io/projected/e6c79eff-4c2e-42b9-af82-df66c7091400-kube-api-access-zxgtw\") pod \"dnsmasq-dns-8b5c85b87-frwpd\" (UID: \"e6c79eff-4c2e-42b9-af82-df66c7091400\") " pod="openstack/dnsmasq-dns-8b5c85b87-frwpd" Sep 30 12:36:42 crc kubenswrapper[5002]: I0930 12:36:42.980959 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9pmcw\" (UniqueName: \"kubernetes.io/projected/4fdf0c77-68ae-41ff-b6b5-122baa461b8c-kube-api-access-9pmcw\") pod \"placement-db-sync-bnf58\" (UID: \"4fdf0c77-68ae-41ff-b6b5-122baa461b8c\") " pod="openstack/placement-db-sync-bnf58" Sep 30 12:36:42 crc kubenswrapper[5002]: I0930 12:36:42.984344 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w98jx\" (UniqueName: \"kubernetes.io/projected/cc38060e-b831-4300-9f91-e62f1075da79-kube-api-access-w98jx\") pod \"glance-default-external-api-0\" (UID: \"cc38060e-b831-4300-9f91-e62f1075da79\") " pod="openstack/glance-default-external-api-0" Sep 30 12:36:42 crc kubenswrapper[5002]: I0930 12:36:42.987234 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Sep 30 12:36:42 crc kubenswrapper[5002]: I0930 12:36:42.990267 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cc38060e-b831-4300-9f91-e62f1075da79-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"cc38060e-b831-4300-9f91-e62f1075da79\") " pod="openstack/glance-default-external-api-0" Sep 30 12:36:43 crc kubenswrapper[5002]: I0930 12:36:43.061049 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-external-api-0\" (UID: \"cc38060e-b831-4300-9f91-e62f1075da79\") " pod="openstack/glance-default-external-api-0" Sep 30 12:36:43 crc kubenswrapper[5002]: I0930 12:36:43.136045 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-bnf58" Sep 30 12:36:43 crc kubenswrapper[5002]: I0930 12:36:43.154883 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-8b5c85b87-frwpd" Sep 30 12:36:43 crc kubenswrapper[5002]: I0930 12:36:43.222444 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Sep 30 12:36:43 crc kubenswrapper[5002]: I0930 12:36:43.242902 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-799d9cbb9f-xj4zl"] Sep 30 12:36:43 crc kubenswrapper[5002]: W0930 12:36:43.292010 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod28380b30_cf01_44f7_8ec6_b1812a5e8435.slice/crio-8b9cf2c972cd245b0494fe689bf4ca39a5cf1bcf8dfd925148adbd20e9610ed3 WatchSource:0}: Error finding container 8b9cf2c972cd245b0494fe689bf4ca39a5cf1bcf8dfd925148adbd20e9610ed3: Status 404 returned error can't find the container with id 8b9cf2c972cd245b0494fe689bf4ca39a5cf1bcf8dfd925148adbd20e9610ed3 Sep 30 12:36:43 crc kubenswrapper[5002]: I0930 12:36:43.344129 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Sep 30 12:36:43 crc kubenswrapper[5002]: I0930 12:36:43.366855 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7ff5475cc9-g4bpt" Sep 30 12:36:43 crc kubenswrapper[5002]: I0930 12:36:43.430357 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5c5cc7c5ff-j97p2"] Sep 30 12:36:43 crc kubenswrapper[5002]: I0930 12:36:43.470875 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/137d5592-5ce3-4aa8-924c-d287bcd87d4c-dns-svc\") pod \"137d5592-5ce3-4aa8-924c-d287bcd87d4c\" (UID: \"137d5592-5ce3-4aa8-924c-d287bcd87d4c\") " Sep 30 12:36:43 crc kubenswrapper[5002]: I0930 12:36:43.471026 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/137d5592-5ce3-4aa8-924c-d287bcd87d4c-config\") pod \"137d5592-5ce3-4aa8-924c-d287bcd87d4c\" (UID: \"137d5592-5ce3-4aa8-924c-d287bcd87d4c\") " Sep 30 12:36:43 crc kubenswrapper[5002]: I0930 12:36:43.471058 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/137d5592-5ce3-4aa8-924c-d287bcd87d4c-dns-swift-storage-0\") pod \"137d5592-5ce3-4aa8-924c-d287bcd87d4c\" (UID: \"137d5592-5ce3-4aa8-924c-d287bcd87d4c\") " Sep 30 12:36:43 crc kubenswrapper[5002]: I0930 12:36:43.471155 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-792q8\" (UniqueName: \"kubernetes.io/projected/137d5592-5ce3-4aa8-924c-d287bcd87d4c-kube-api-access-792q8\") pod \"137d5592-5ce3-4aa8-924c-d287bcd87d4c\" (UID: \"137d5592-5ce3-4aa8-924c-d287bcd87d4c\") " Sep 30 12:36:43 crc kubenswrapper[5002]: I0930 12:36:43.471184 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/137d5592-5ce3-4aa8-924c-d287bcd87d4c-ovsdbserver-sb\") pod \"137d5592-5ce3-4aa8-924c-d287bcd87d4c\" (UID: \"137d5592-5ce3-4aa8-924c-d287bcd87d4c\") " Sep 30 12:36:43 crc kubenswrapper[5002]: I0930 12:36:43.471211 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/137d5592-5ce3-4aa8-924c-d287bcd87d4c-ovsdbserver-nb\") pod \"137d5592-5ce3-4aa8-924c-d287bcd87d4c\" (UID: \"137d5592-5ce3-4aa8-924c-d287bcd87d4c\") " Sep 30 12:36:43 crc kubenswrapper[5002]: I0930 12:36:43.476394 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/137d5592-5ce3-4aa8-924c-d287bcd87d4c-kube-api-access-792q8" (OuterVolumeSpecName: "kube-api-access-792q8") pod "137d5592-5ce3-4aa8-924c-d287bcd87d4c" (UID: "137d5592-5ce3-4aa8-924c-d287bcd87d4c"). InnerVolumeSpecName "kube-api-access-792q8". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 12:36:43 crc kubenswrapper[5002]: I0930 12:36:43.498802 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/137d5592-5ce3-4aa8-924c-d287bcd87d4c-config" (OuterVolumeSpecName: "config") pod "137d5592-5ce3-4aa8-924c-d287bcd87d4c" (UID: "137d5592-5ce3-4aa8-924c-d287bcd87d4c"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 12:36:43 crc kubenswrapper[5002]: I0930 12:36:43.507257 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/137d5592-5ce3-4aa8-924c-d287bcd87d4c-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "137d5592-5ce3-4aa8-924c-d287bcd87d4c" (UID: "137d5592-5ce3-4aa8-924c-d287bcd87d4c"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 12:36:43 crc kubenswrapper[5002]: I0930 12:36:43.509009 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/137d5592-5ce3-4aa8-924c-d287bcd87d4c-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "137d5592-5ce3-4aa8-924c-d287bcd87d4c" (UID: "137d5592-5ce3-4aa8-924c-d287bcd87d4c"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 12:36:43 crc kubenswrapper[5002]: I0930 12:36:43.531417 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/137d5592-5ce3-4aa8-924c-d287bcd87d4c-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "137d5592-5ce3-4aa8-924c-d287bcd87d4c" (UID: "137d5592-5ce3-4aa8-924c-d287bcd87d4c"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 12:36:43 crc kubenswrapper[5002]: I0930 12:36:43.531904 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/137d5592-5ce3-4aa8-924c-d287bcd87d4c-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "137d5592-5ce3-4aa8-924c-d287bcd87d4c" (UID: "137d5592-5ce3-4aa8-924c-d287bcd87d4c"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 12:36:43 crc kubenswrapper[5002]: I0930 12:36:43.532804 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-7cf8dd6b55-z9psc"] Sep 30 12:36:43 crc kubenswrapper[5002]: I0930 12:36:43.544543 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-tt2dz"] Sep 30 12:36:43 crc kubenswrapper[5002]: W0930 12:36:43.546404 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7d2bf63f_4cce_402b_b774_e77dc172959c.slice/crio-eb75eb3ee65a0d84d162a72d4b054c2f5cec0985fc6a782f23b4cce7bfc20470 WatchSource:0}: Error finding container eb75eb3ee65a0d84d162a72d4b054c2f5cec0985fc6a782f23b4cce7bfc20470: Status 404 returned error can't find the container with id eb75eb3ee65a0d84d162a72d4b054c2f5cec0985fc6a782f23b4cce7bfc20470 Sep 30 12:36:43 crc kubenswrapper[5002]: I0930 12:36:43.576347 5002 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/137d5592-5ce3-4aa8-924c-d287bcd87d4c-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Sep 30 12:36:43 crc kubenswrapper[5002]: I0930 12:36:43.576375 5002 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/137d5592-5ce3-4aa8-924c-d287bcd87d4c-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Sep 30 12:36:43 crc kubenswrapper[5002]: I0930 12:36:43.576407 5002 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/137d5592-5ce3-4aa8-924c-d287bcd87d4c-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 30 12:36:43 crc kubenswrapper[5002]: I0930 12:36:43.576417 5002 
reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/137d5592-5ce3-4aa8-924c-d287bcd87d4c-config\") on node \"crc\" DevicePath \"\"" Sep 30 12:36:43 crc kubenswrapper[5002]: I0930 12:36:43.576426 5002 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/137d5592-5ce3-4aa8-924c-d287bcd87d4c-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Sep 30 12:36:43 crc kubenswrapper[5002]: I0930 12:36:43.576437 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-792q8\" (UniqueName: \"kubernetes.io/projected/137d5592-5ce3-4aa8-924c-d287bcd87d4c-kube-api-access-792q8\") on node \"crc\" DevicePath \"\"" Sep 30 12:36:43 crc kubenswrapper[5002]: I0930 12:36:43.675723 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 30 12:36:43 crc kubenswrapper[5002]: I0930 12:36:43.792313 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-tt2dz" event={"ID":"b8d82577-a47f-40e2-9986-5883fea9ddf5","Type":"ContainerStarted","Data":"942a543bf0d7664bbc16cd0c2e9b23e1dd807af774f2a94734acc7159cf5166e"} Sep 30 12:36:43 crc kubenswrapper[5002]: I0930 12:36:43.794881 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7ff5475cc9-g4bpt" Sep 30 12:36:43 crc kubenswrapper[5002]: I0930 12:36:43.796278 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7ff5475cc9-g4bpt" event={"ID":"137d5592-5ce3-4aa8-924c-d287bcd87d4c","Type":"ContainerDied","Data":"dd5d8a509e680e3a8933eb36a5e2cc80ef58a1c308fa99534abb12cb58b4a130"} Sep 30 12:36:43 crc kubenswrapper[5002]: I0930 12:36:43.796345 5002 scope.go:117] "RemoveContainer" containerID="81e16a3e62248c1d3069baaf8390029d6b8a5f28380fe3ccf9d7b36c652564e6" Sep 30 12:36:43 crc kubenswrapper[5002]: I0930 12:36:43.797666 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-799d9cbb9f-xj4zl" event={"ID":"28380b30-cf01-44f7-8ec6-b1812a5e8435","Type":"ContainerStarted","Data":"8b9cf2c972cd245b0494fe689bf4ca39a5cf1bcf8dfd925148adbd20e9610ed3"} Sep 30 12:36:43 crc kubenswrapper[5002]: I0930 12:36:43.799824 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7cf8dd6b55-z9psc" event={"ID":"7d2bf63f-4cce-402b-b774-e77dc172959c","Type":"ContainerStarted","Data":"eb75eb3ee65a0d84d162a72d4b054c2f5cec0985fc6a782f23b4cce7bfc20470"} Sep 30 12:36:43 crc kubenswrapper[5002]: I0930 12:36:43.802287 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c5cc7c5ff-j97p2" event={"ID":"94030f2b-828a-4de2-9975-688abbf7e3e9","Type":"ContainerStarted","Data":"19824d1039693f91677fd81f84e8df9621b5343ed543a16d7fdbf85e8a078f2f"} Sep 30 12:36:43 crc kubenswrapper[5002]: I0930 12:36:43.805191 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"22178f02-1a64-4a88-a564-fe143875e7df","Type":"ContainerStarted","Data":"63ecdbf9b209afe0b1d25296a77baa9be2af34eb735fa2fe1ff9a460d213ab7d"} Sep 30 12:36:43 crc kubenswrapper[5002]: I0930 12:36:43.826527 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-8b5c85b87-frwpd"] Sep 30 12:36:43 crc kubenswrapper[5002]: I0930 12:36:43.834167 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-bnf58"] Sep 30 12:36:43 crc kubenswrapper[5002]: W0930 12:36:43.863856 5002 manager.go:1169] Failed to process watch 
Sep 30 12:36:43 crc kubenswrapper[5002]: I0930 12:36:43.896181 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7ff5475cc9-g4bpt"]
Sep 30 12:36:43 crc kubenswrapper[5002]: I0930 12:36:43.922500 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-7ff5475cc9-g4bpt"]
Sep 30 12:36:44 crc kubenswrapper[5002]: I0930 12:36:44.063390 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"]
Sep 30 12:36:44 crc kubenswrapper[5002]: I0930 12:36:44.690944 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="137d5592-5ce3-4aa8-924c-d287bcd87d4c" path="/var/lib/kubelet/pods/137d5592-5ce3-4aa8-924c-d287bcd87d4c/volumes"
Sep 30 12:36:44 crc kubenswrapper[5002]: I0930 12:36:44.817297 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-tt2dz" event={"ID":"b8d82577-a47f-40e2-9986-5883fea9ddf5","Type":"ContainerStarted","Data":"0e29b308ed897f927e40965d64a3b8d17defa7c2ebd98f7009a18984fcf63925"}
Sep 30 12:36:44 crc kubenswrapper[5002]: I0930 12:36:44.822053 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"]
Sep 30 12:36:44 crc kubenswrapper[5002]: I0930 12:36:44.829249 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-bnf58" event={"ID":"4fdf0c77-68ae-41ff-b6b5-122baa461b8c","Type":"ContainerStarted","Data":"12233c05675c8fbfbf484b5dec280ffde667e0ac32fa8dbd55cf0c01d16dd854"}
Sep 30 12:36:44 crc kubenswrapper[5002]: I0930 12:36:44.835735 5002 generic.go:334] "Generic (PLEG): container finished" podID="e6c79eff-4c2e-42b9-af82-df66c7091400" containerID="d899ae74c228c6148c7d04b499e7e778900039ca21449b5f7f70729d363eed91" exitCode=0
Sep 30 12:36:44 crc kubenswrapper[5002]: I0930 12:36:44.835797 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8b5c85b87-frwpd" event={"ID":"e6c79eff-4c2e-42b9-af82-df66c7091400","Type":"ContainerDied","Data":"d899ae74c228c6148c7d04b499e7e778900039ca21449b5f7f70729d363eed91"}
Sep 30 12:36:44 crc kubenswrapper[5002]: I0930 12:36:44.835823 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8b5c85b87-frwpd" event={"ID":"e6c79eff-4c2e-42b9-af82-df66c7091400","Type":"ContainerStarted","Data":"c6a2448193fde244abf77b8eea72555c50e2fd81229b7ac6c202bfc718909763"}
Sep 30 12:36:44 crc kubenswrapper[5002]: I0930 12:36:44.838256 5002 generic.go:334] "Generic (PLEG): container finished" podID="94030f2b-828a-4de2-9975-688abbf7e3e9" containerID="6ef4af5bf62c04fd1f5f0caaa9848c3f31aaa741f7e41a606b34dea331c3be87" exitCode=0
Sep 30 12:36:44 crc kubenswrapper[5002]: I0930 12:36:44.838301 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c5cc7c5ff-j97p2" event={"ID":"94030f2b-828a-4de2-9975-688abbf7e3e9","Type":"ContainerDied","Data":"6ef4af5bf62c04fd1f5f0caaa9848c3f31aaa741f7e41a606b34dea331c3be87"}
Sep 30 12:36:44 crc kubenswrapper[5002]: I0930 12:36:44.841585 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"14d6a446-4f72-417e-8bdd-af837eda2e70","Type":"ContainerStarted","Data":"9cfaf90944d3681da676e69ecb8c3719aecc5ffc5074ac818734df38142da6e2"}
Sep 30 12:36:44 crc kubenswrapper[5002]: I0930 12:36:44.841618 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"14d6a446-4f72-417e-8bdd-af837eda2e70","Type":"ContainerStarted","Data":"7495e664c47a536f13a523bed9ecb6a1e2ab89fe397ef0d78fca0f58078ff413"}
Sep 30 12:36:44 crc kubenswrapper[5002]: W0930 12:36:44.853031 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podcc38060e_b831_4300_9f91_e62f1075da79.slice/crio-7b1395b086ab7451066f6ed28d50cfa07a8ec6d56a798cddbb29c97f664fcbb3 WatchSource:0}: Error finding container 7b1395b086ab7451066f6ed28d50cfa07a8ec6d56a798cddbb29c97f664fcbb3: Status 404 returned error can't find the container with id 7b1395b086ab7451066f6ed28d50cfa07a8ec6d56a798cddbb29c97f664fcbb3
Sep 30 12:36:44 crc kubenswrapper[5002]: I0930 12:36:44.861678 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-tt2dz" podStartSLOduration=2.861635423 podStartE2EDuration="2.861635423s" podCreationTimestamp="2025-09-30 12:36:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 12:36:44.840137605 +0000 UTC m=+979.089819841" watchObservedRunningTime="2025-09-30 12:36:44.861635423 +0000 UTC m=+979.111317589"
Sep 30 12:36:45 crc kubenswrapper[5002]: I0930 12:36:45.341687 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5c5cc7c5ff-j97p2"
Sep 30 12:36:45 crc kubenswrapper[5002]: I0930 12:36:45.520830 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/94030f2b-828a-4de2-9975-688abbf7e3e9-ovsdbserver-sb\") pod \"94030f2b-828a-4de2-9975-688abbf7e3e9\" (UID: \"94030f2b-828a-4de2-9975-688abbf7e3e9\") "
Sep 30 12:36:45 crc kubenswrapper[5002]: I0930 12:36:45.521248 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/94030f2b-828a-4de2-9975-688abbf7e3e9-ovsdbserver-nb\") pod \"94030f2b-828a-4de2-9975-688abbf7e3e9\" (UID: \"94030f2b-828a-4de2-9975-688abbf7e3e9\") "
Sep 30 12:36:45 crc kubenswrapper[5002]: I0930 12:36:45.521344 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/94030f2b-828a-4de2-9975-688abbf7e3e9-dns-swift-storage-0\") pod \"94030f2b-828a-4de2-9975-688abbf7e3e9\" (UID: \"94030f2b-828a-4de2-9975-688abbf7e3e9\") "
Sep 30 12:36:45 crc kubenswrapper[5002]: I0930 12:36:45.521416 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/94030f2b-828a-4de2-9975-688abbf7e3e9-dns-svc\") pod \"94030f2b-828a-4de2-9975-688abbf7e3e9\" (UID: \"94030f2b-828a-4de2-9975-688abbf7e3e9\") "
Sep 30 12:36:45 crc kubenswrapper[5002]: I0930 12:36:45.521450 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nwdbl\" (UniqueName: \"kubernetes.io/projected/94030f2b-828a-4de2-9975-688abbf7e3e9-kube-api-access-nwdbl\") pod \"94030f2b-828a-4de2-9975-688abbf7e3e9\" (UID: \"94030f2b-828a-4de2-9975-688abbf7e3e9\") "
Sep 30 12:36:45 crc kubenswrapper[5002]: I0930 12:36:45.521552 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/94030f2b-828a-4de2-9975-688abbf7e3e9-config\") pod \"94030f2b-828a-4de2-9975-688abbf7e3e9\" (UID: \"94030f2b-828a-4de2-9975-688abbf7e3e9\") "
Sep 30 12:36:45 crc kubenswrapper[5002]: I0930 12:36:45.527852 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/94030f2b-828a-4de2-9975-688abbf7e3e9-kube-api-access-nwdbl" (OuterVolumeSpecName: "kube-api-access-nwdbl") pod "94030f2b-828a-4de2-9975-688abbf7e3e9" (UID: "94030f2b-828a-4de2-9975-688abbf7e3e9"). InnerVolumeSpecName "kube-api-access-nwdbl". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 12:36:45 crc kubenswrapper[5002]: I0930 12:36:45.543606 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/94030f2b-828a-4de2-9975-688abbf7e3e9-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "94030f2b-828a-4de2-9975-688abbf7e3e9" (UID: "94030f2b-828a-4de2-9975-688abbf7e3e9"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 30 12:36:45 crc kubenswrapper[5002]: I0930 12:36:45.544266 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/94030f2b-828a-4de2-9975-688abbf7e3e9-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "94030f2b-828a-4de2-9975-688abbf7e3e9" (UID: "94030f2b-828a-4de2-9975-688abbf7e3e9"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 30 12:36:45 crc kubenswrapper[5002]: I0930 12:36:45.546700 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/94030f2b-828a-4de2-9975-688abbf7e3e9-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "94030f2b-828a-4de2-9975-688abbf7e3e9" (UID: "94030f2b-828a-4de2-9975-688abbf7e3e9"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 30 12:36:45 crc kubenswrapper[5002]: I0930 12:36:45.546711 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/94030f2b-828a-4de2-9975-688abbf7e3e9-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "94030f2b-828a-4de2-9975-688abbf7e3e9" (UID: "94030f2b-828a-4de2-9975-688abbf7e3e9"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 30 12:36:45 crc kubenswrapper[5002]: I0930 12:36:45.559925 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/94030f2b-828a-4de2-9975-688abbf7e3e9-config" (OuterVolumeSpecName: "config") pod "94030f2b-828a-4de2-9975-688abbf7e3e9" (UID: "94030f2b-828a-4de2-9975-688abbf7e3e9"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 12:36:45 crc kubenswrapper[5002]: I0930 12:36:45.623888 5002 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/94030f2b-828a-4de2-9975-688abbf7e3e9-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Sep 30 12:36:45 crc kubenswrapper[5002]: I0930 12:36:45.623921 5002 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/94030f2b-828a-4de2-9975-688abbf7e3e9-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Sep 30 12:36:45 crc kubenswrapper[5002]: I0930 12:36:45.623933 5002 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/94030f2b-828a-4de2-9975-688abbf7e3e9-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 30 12:36:45 crc kubenswrapper[5002]: I0930 12:36:45.623972 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nwdbl\" (UniqueName: \"kubernetes.io/projected/94030f2b-828a-4de2-9975-688abbf7e3e9-kube-api-access-nwdbl\") on node \"crc\" DevicePath \"\"" Sep 30 12:36:45 crc kubenswrapper[5002]: I0930 12:36:45.623983 5002 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/94030f2b-828a-4de2-9975-688abbf7e3e9-config\") on node \"crc\" DevicePath \"\"" Sep 30 12:36:45 crc kubenswrapper[5002]: I0930 12:36:45.623991 5002 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/94030f2b-828a-4de2-9975-688abbf7e3e9-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Sep 30 12:36:45 crc kubenswrapper[5002]: I0930 12:36:45.860414 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"14d6a446-4f72-417e-8bdd-af837eda2e70","Type":"ContainerStarted","Data":"1a11bac8fa92b6ef5bf7f5731730deb7d987f7936eeb25001fcc56ca768e2725"} Sep 30 12:36:45 crc kubenswrapper[5002]: I0930 12:36:45.868862 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8b5c85b87-frwpd" event={"ID":"e6c79eff-4c2e-42b9-af82-df66c7091400","Type":"ContainerStarted","Data":"6a0976a3981404b2496dbd01df6e6cf6820c9aa403d6ea3e0c4f6fd651927352"} Sep 30 12:36:45 crc kubenswrapper[5002]: I0930 12:36:45.868999 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-8b5c85b87-frwpd" Sep 30 12:36:45 crc kubenswrapper[5002]: I0930 12:36:45.870661 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c5cc7c5ff-j97p2" event={"ID":"94030f2b-828a-4de2-9975-688abbf7e3e9","Type":"ContainerDied","Data":"19824d1039693f91677fd81f84e8df9621b5343ed543a16d7fdbf85e8a078f2f"} Sep 30 12:36:45 crc kubenswrapper[5002]: I0930 12:36:45.870674 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5c5cc7c5ff-j97p2" Sep 30 12:36:45 crc kubenswrapper[5002]: I0930 12:36:45.870695 5002 scope.go:117] "RemoveContainer" containerID="6ef4af5bf62c04fd1f5f0caaa9848c3f31aaa741f7e41a606b34dea331c3be87" Sep 30 12:36:45 crc kubenswrapper[5002]: I0930 12:36:45.878302 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"cc38060e-b831-4300-9f91-e62f1075da79","Type":"ContainerStarted","Data":"7b1395b086ab7451066f6ed28d50cfa07a8ec6d56a798cddbb29c97f664fcbb3"} Sep 30 12:36:45 crc kubenswrapper[5002]: I0930 12:36:45.907205 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-8b5c85b87-frwpd" podStartSLOduration=3.907188184 podStartE2EDuration="3.907188184s" podCreationTimestamp="2025-09-30 12:36:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 12:36:45.903089899 +0000 UTC m=+980.152772065" watchObservedRunningTime="2025-09-30 12:36:45.907188184 +0000 UTC m=+980.156870330" Sep 30 12:36:45 crc kubenswrapper[5002]: I0930 12:36:45.908605 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=3.90859444 podStartE2EDuration="3.90859444s" podCreationTimestamp="2025-09-30 12:36:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 12:36:45.884718702 +0000 UTC m=+980.134400858" watchObservedRunningTime="2025-09-30 12:36:45.90859444 +0000 UTC m=+980.158276586" Sep 30 12:36:45 crc kubenswrapper[5002]: I0930 12:36:45.962115 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5c5cc7c5ff-j97p2"] Sep 30 12:36:45 crc kubenswrapper[5002]: I0930 12:36:45.968623 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5c5cc7c5ff-j97p2"] Sep 30 12:36:46 crc kubenswrapper[5002]: I0930 12:36:46.694184 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="94030f2b-828a-4de2-9975-688abbf7e3e9" path="/var/lib/kubelet/pods/94030f2b-828a-4de2-9975-688abbf7e3e9/volumes" Sep 30 12:36:46 crc kubenswrapper[5002]: I0930 12:36:46.808316 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-c8b1-account-create-tkp6v"] Sep 30 12:36:46 crc kubenswrapper[5002]: E0930 12:36:46.808734 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="94030f2b-828a-4de2-9975-688abbf7e3e9" containerName="init" Sep 30 12:36:46 crc kubenswrapper[5002]: I0930 12:36:46.808753 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="94030f2b-828a-4de2-9975-688abbf7e3e9" containerName="init" Sep 30 12:36:46 crc kubenswrapper[5002]: E0930 12:36:46.808766 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="137d5592-5ce3-4aa8-924c-d287bcd87d4c" containerName="init" Sep 30 12:36:46 crc kubenswrapper[5002]: I0930 12:36:46.808774 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="137d5592-5ce3-4aa8-924c-d287bcd87d4c" containerName="init" Sep 30 12:36:46 crc kubenswrapper[5002]: I0930 12:36:46.808932 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="137d5592-5ce3-4aa8-924c-d287bcd87d4c" containerName="init" Sep 30 12:36:46 crc kubenswrapper[5002]: I0930 12:36:46.808953 5002 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="94030f2b-828a-4de2-9975-688abbf7e3e9" containerName="init" Sep 30 12:36:46 crc kubenswrapper[5002]: I0930 12:36:46.809516 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-c8b1-account-create-tkp6v" Sep 30 12:36:46 crc kubenswrapper[5002]: I0930 12:36:46.811961 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-c8b1-account-create-tkp6v"] Sep 30 12:36:46 crc kubenswrapper[5002]: I0930 12:36:46.812088 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-db-secret" Sep 30 12:36:46 crc kubenswrapper[5002]: I0930 12:36:46.859414 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-0907-account-create-r5dxh"] Sep 30 12:36:46 crc kubenswrapper[5002]: I0930 12:36:46.860545 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-0907-account-create-r5dxh" Sep 30 12:36:46 crc kubenswrapper[5002]: I0930 12:36:46.876819 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-db-secret" Sep 30 12:36:46 crc kubenswrapper[5002]: I0930 12:36:46.895123 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-0907-account-create-r5dxh"] Sep 30 12:36:46 crc kubenswrapper[5002]: I0930 12:36:46.906896 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"cc38060e-b831-4300-9f91-e62f1075da79","Type":"ContainerStarted","Data":"0b4707074aebf2d025ef4b985dd07c9692750ee98e7f6a36594735de1f63e364"} Sep 30 12:36:46 crc kubenswrapper[5002]: I0930 12:36:46.906951 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"cc38060e-b831-4300-9f91-e62f1075da79","Type":"ContainerStarted","Data":"9d185eb5745a75883dce234f0ddd099b8145cb59e56eb83ae3b7ff72f482cf28"} Sep 30 12:36:46 crc kubenswrapper[5002]: I0930 12:36:46.934068 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=4.93404207 podStartE2EDuration="4.93404207s" podCreationTimestamp="2025-09-30 12:36:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 12:36:46.926041235 +0000 UTC m=+981.175723381" watchObservedRunningTime="2025-09-30 12:36:46.93404207 +0000 UTC m=+981.183724216" Sep 30 12:36:46 crc kubenswrapper[5002]: I0930 12:36:46.964234 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b9w76\" (UniqueName: \"kubernetes.io/projected/397fcd72-2dc0-4de0-9a5d-69fc3b14640a-kube-api-access-b9w76\") pod \"barbican-c8b1-account-create-tkp6v\" (UID: \"397fcd72-2dc0-4de0-9a5d-69fc3b14640a\") " pod="openstack/barbican-c8b1-account-create-tkp6v" Sep 30 12:36:46 crc kubenswrapper[5002]: I0930 12:36:46.964381 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fmrl6\" (UniqueName: \"kubernetes.io/projected/153e0644-8b78-4c21-9377-a0d4c7cf0848-kube-api-access-fmrl6\") pod \"cinder-0907-account-create-r5dxh\" (UID: \"153e0644-8b78-4c21-9377-a0d4c7cf0848\") " pod="openstack/cinder-0907-account-create-r5dxh" Sep 30 12:36:47 crc kubenswrapper[5002]: I0930 12:36:47.023082 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Sep 30 12:36:47 crc kubenswrapper[5002]: I0930 
Sep 30 12:36:47 crc kubenswrapper[5002]: I0930 12:36:47.066388 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fmrl6\" (UniqueName: \"kubernetes.io/projected/153e0644-8b78-4c21-9377-a0d4c7cf0848-kube-api-access-fmrl6\") pod \"cinder-0907-account-create-r5dxh\" (UID: \"153e0644-8b78-4c21-9377-a0d4c7cf0848\") " pod="openstack/cinder-0907-account-create-r5dxh"
Sep 30 12:36:47 crc kubenswrapper[5002]: I0930 12:36:47.075955 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"]
Sep 30 12:36:47 crc kubenswrapper[5002]: I0930 12:36:47.078517 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-799d9cbb9f-xj4zl"]
Sep 30 12:36:47 crc kubenswrapper[5002]: I0930 12:36:47.091793 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-dcab-account-create-mxdfk"]
Sep 30 12:36:47 crc kubenswrapper[5002]: I0930 12:36:47.092878 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-dcab-account-create-mxdfk"
Sep 30 12:36:47 crc kubenswrapper[5002]: I0930 12:36:47.098412 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-db-secret"
Sep 30 12:36:47 crc kubenswrapper[5002]: I0930 12:36:47.106307 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-dcab-account-create-mxdfk"]
Sep 30 12:36:47 crc kubenswrapper[5002]: I0930 12:36:47.107286 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fmrl6\" (UniqueName: \"kubernetes.io/projected/153e0644-8b78-4c21-9377-a0d4c7cf0848-kube-api-access-fmrl6\") pod \"cinder-0907-account-create-r5dxh\" (UID: \"153e0644-8b78-4c21-9377-a0d4c7cf0848\") " pod="openstack/cinder-0907-account-create-r5dxh"
Sep 30 12:36:47 crc kubenswrapper[5002]: I0930 12:36:47.110769 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b9w76\" (UniqueName: \"kubernetes.io/projected/397fcd72-2dc0-4de0-9a5d-69fc3b14640a-kube-api-access-b9w76\") pod \"barbican-c8b1-account-create-tkp6v\" (UID: \"397fcd72-2dc0-4de0-9a5d-69fc3b14640a\") " pod="openstack/barbican-c8b1-account-create-tkp6v"
Sep 30 12:36:47 crc kubenswrapper[5002]: I0930 12:36:47.128507 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-56d859b67f-fr8wt"]
Sep 30 12:36:47 crc kubenswrapper[5002]: I0930 12:36:47.133716 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-56d859b67f-fr8wt"
Sep 30 12:36:47 crc kubenswrapper[5002]: I0930 12:36:47.139794 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-c8b1-account-create-tkp6v"
Sep 30 12:36:47 crc kubenswrapper[5002]: I0930 12:36:47.168501 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l5xtk\" (UniqueName: \"kubernetes.io/projected/67948d65-c149-45be-914e-30dc00325da6-kube-api-access-l5xtk\") pod \"neutron-dcab-account-create-mxdfk\" (UID: \"67948d65-c149-45be-914e-30dc00325da6\") " pod="openstack/neutron-dcab-account-create-mxdfk"
Sep 30 12:36:47 crc kubenswrapper[5002]: I0930 12:36:47.178780 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-56d859b67f-fr8wt"]
Sep 30 12:36:47 crc kubenswrapper[5002]: I0930 12:36:47.184466 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"]
Sep 30 12:36:47 crc kubenswrapper[5002]: I0930 12:36:47.187794 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-0907-account-create-r5dxh"
Sep 30 12:36:47 crc kubenswrapper[5002]: I0930 12:36:47.269954 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jfmqz\" (UniqueName: \"kubernetes.io/projected/b44591be-b0af-4144-a646-7739c7ea1e69-kube-api-access-jfmqz\") pod \"horizon-56d859b67f-fr8wt\" (UID: \"b44591be-b0af-4144-a646-7739c7ea1e69\") " pod="openstack/horizon-56d859b67f-fr8wt"
Sep 30 12:36:47 crc kubenswrapper[5002]: I0930 12:36:47.270001 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b44591be-b0af-4144-a646-7739c7ea1e69-logs\") pod \"horizon-56d859b67f-fr8wt\" (UID: \"b44591be-b0af-4144-a646-7739c7ea1e69\") " pod="openstack/horizon-56d859b67f-fr8wt"
Sep 30 12:36:47 crc kubenswrapper[5002]: I0930 12:36:47.270048 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/b44591be-b0af-4144-a646-7739c7ea1e69-scripts\") pod \"horizon-56d859b67f-fr8wt\" (UID: \"b44591be-b0af-4144-a646-7739c7ea1e69\") " pod="openstack/horizon-56d859b67f-fr8wt"
Sep 30 12:36:47 crc kubenswrapper[5002]: I0930 12:36:47.270071 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/b44591be-b0af-4144-a646-7739c7ea1e69-config-data\") pod \"horizon-56d859b67f-fr8wt\" (UID: \"b44591be-b0af-4144-a646-7739c7ea1e69\") " pod="openstack/horizon-56d859b67f-fr8wt"
Sep 30 12:36:47 crc kubenswrapper[5002]: I0930 12:36:47.270130 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l5xtk\" (UniqueName: \"kubernetes.io/projected/67948d65-c149-45be-914e-30dc00325da6-kube-api-access-l5xtk\") pod \"neutron-dcab-account-create-mxdfk\" (UID: \"67948d65-c149-45be-914e-30dc00325da6\") " pod="openstack/neutron-dcab-account-create-mxdfk"
Sep 30 12:36:47 crc kubenswrapper[5002]: I0930 12:36:47.270190 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/b44591be-b0af-4144-a646-7739c7ea1e69-horizon-secret-key\") pod \"horizon-56d859b67f-fr8wt\" (UID: \"b44591be-b0af-4144-a646-7739c7ea1e69\") " pod="openstack/horizon-56d859b67f-fr8wt"
Sep 30 12:36:47 crc kubenswrapper[5002]: I0930 12:36:47.286280 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l5xtk\" (UniqueName: \"kubernetes.io/projected/67948d65-c149-45be-914e-30dc00325da6-kube-api-access-l5xtk\") pod \"neutron-dcab-account-create-mxdfk\" (UID: \"67948d65-c149-45be-914e-30dc00325da6\") " pod="openstack/neutron-dcab-account-create-mxdfk"
Sep 30 12:36:47 crc kubenswrapper[5002]: I0930 12:36:47.371508 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jfmqz\" (UniqueName: \"kubernetes.io/projected/b44591be-b0af-4144-a646-7739c7ea1e69-kube-api-access-jfmqz\") pod \"horizon-56d859b67f-fr8wt\" (UID: \"b44591be-b0af-4144-a646-7739c7ea1e69\") " pod="openstack/horizon-56d859b67f-fr8wt"
Sep 30 12:36:47 crc kubenswrapper[5002]: I0930 12:36:47.371548 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b44591be-b0af-4144-a646-7739c7ea1e69-logs\") pod \"horizon-56d859b67f-fr8wt\" (UID: \"b44591be-b0af-4144-a646-7739c7ea1e69\") " pod="openstack/horizon-56d859b67f-fr8wt"
Sep 30 12:36:47 crc kubenswrapper[5002]: I0930 12:36:47.371590 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/b44591be-b0af-4144-a646-7739c7ea1e69-scripts\") pod \"horizon-56d859b67f-fr8wt\" (UID: \"b44591be-b0af-4144-a646-7739c7ea1e69\") " pod="openstack/horizon-56d859b67f-fr8wt"
Sep 30 12:36:47 crc kubenswrapper[5002]: I0930 12:36:47.371609 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/b44591be-b0af-4144-a646-7739c7ea1e69-config-data\") pod \"horizon-56d859b67f-fr8wt\" (UID: \"b44591be-b0af-4144-a646-7739c7ea1e69\") " pod="openstack/horizon-56d859b67f-fr8wt"
Sep 30 12:36:47 crc kubenswrapper[5002]: I0930 12:36:47.371685 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/b44591be-b0af-4144-a646-7739c7ea1e69-horizon-secret-key\") pod \"horizon-56d859b67f-fr8wt\" (UID: \"b44591be-b0af-4144-a646-7739c7ea1e69\") " pod="openstack/horizon-56d859b67f-fr8wt"
Sep 30 12:36:47 crc kubenswrapper[5002]: I0930 12:36:47.372050 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b44591be-b0af-4144-a646-7739c7ea1e69-logs\") pod \"horizon-56d859b67f-fr8wt\" (UID: \"b44591be-b0af-4144-a646-7739c7ea1e69\") " pod="openstack/horizon-56d859b67f-fr8wt"
Sep 30 12:36:47 crc kubenswrapper[5002]: I0930 12:36:47.374707 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/b44591be-b0af-4144-a646-7739c7ea1e69-scripts\") pod \"horizon-56d859b67f-fr8wt\" (UID: \"b44591be-b0af-4144-a646-7739c7ea1e69\") " pod="openstack/horizon-56d859b67f-fr8wt"
Sep 30 12:36:47 crc kubenswrapper[5002]: I0930 12:36:47.375282 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/b44591be-b0af-4144-a646-7739c7ea1e69-config-data\") pod \"horizon-56d859b67f-fr8wt\" (UID: \"b44591be-b0af-4144-a646-7739c7ea1e69\") " pod="openstack/horizon-56d859b67f-fr8wt"
Sep 30 12:36:47 crc kubenswrapper[5002]: I0930 12:36:47.377086 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/b44591be-b0af-4144-a646-7739c7ea1e69-horizon-secret-key\") pod \"horizon-56d859b67f-fr8wt\" (UID: \"b44591be-b0af-4144-a646-7739c7ea1e69\") " pod="openstack/horizon-56d859b67f-fr8wt"
\"b44591be-b0af-4144-a646-7739c7ea1e69\") " pod="openstack/horizon-56d859b67f-fr8wt" Sep 30 12:36:47 crc kubenswrapper[5002]: I0930 12:36:47.387129 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jfmqz\" (UniqueName: \"kubernetes.io/projected/b44591be-b0af-4144-a646-7739c7ea1e69-kube-api-access-jfmqz\") pod \"horizon-56d859b67f-fr8wt\" (UID: \"b44591be-b0af-4144-a646-7739c7ea1e69\") " pod="openstack/horizon-56d859b67f-fr8wt" Sep 30 12:36:47 crc kubenswrapper[5002]: I0930 12:36:47.477331 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-dcab-account-create-mxdfk" Sep 30 12:36:47 crc kubenswrapper[5002]: I0930 12:36:47.495931 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-56d859b67f-fr8wt" Sep 30 12:36:47 crc kubenswrapper[5002]: I0930 12:36:47.912375 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="14d6a446-4f72-417e-8bdd-af837eda2e70" containerName="glance-log" containerID="cri-o://9cfaf90944d3681da676e69ecb8c3719aecc5ffc5074ac818734df38142da6e2" gracePeriod=30 Sep 30 12:36:47 crc kubenswrapper[5002]: I0930 12:36:47.913407 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="14d6a446-4f72-417e-8bdd-af837eda2e70" containerName="glance-httpd" containerID="cri-o://1a11bac8fa92b6ef5bf7f5731730deb7d987f7936eeb25001fcc56ca768e2725" gracePeriod=30 Sep 30 12:36:48 crc kubenswrapper[5002]: I0930 12:36:48.925209 5002 generic.go:334] "Generic (PLEG): container finished" podID="b8d82577-a47f-40e2-9986-5883fea9ddf5" containerID="0e29b308ed897f927e40965d64a3b8d17defa7c2ebd98f7009a18984fcf63925" exitCode=0 Sep 30 12:36:48 crc kubenswrapper[5002]: I0930 12:36:48.925569 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-tt2dz" event={"ID":"b8d82577-a47f-40e2-9986-5883fea9ddf5","Type":"ContainerDied","Data":"0e29b308ed897f927e40965d64a3b8d17defa7c2ebd98f7009a18984fcf63925"} Sep 30 12:36:48 crc kubenswrapper[5002]: I0930 12:36:48.941806 5002 generic.go:334] "Generic (PLEG): container finished" podID="14d6a446-4f72-417e-8bdd-af837eda2e70" containerID="1a11bac8fa92b6ef5bf7f5731730deb7d987f7936eeb25001fcc56ca768e2725" exitCode=0 Sep 30 12:36:48 crc kubenswrapper[5002]: I0930 12:36:48.941834 5002 generic.go:334] "Generic (PLEG): container finished" podID="14d6a446-4f72-417e-8bdd-af837eda2e70" containerID="9cfaf90944d3681da676e69ecb8c3719aecc5ffc5074ac818734df38142da6e2" exitCode=143 Sep 30 12:36:48 crc kubenswrapper[5002]: I0930 12:36:48.941995 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="cc38060e-b831-4300-9f91-e62f1075da79" containerName="glance-log" containerID="cri-o://0b4707074aebf2d025ef4b985dd07c9692750ee98e7f6a36594735de1f63e364" gracePeriod=30 Sep 30 12:36:48 crc kubenswrapper[5002]: I0930 12:36:48.942247 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"14d6a446-4f72-417e-8bdd-af837eda2e70","Type":"ContainerDied","Data":"1a11bac8fa92b6ef5bf7f5731730deb7d987f7936eeb25001fcc56ca768e2725"} Sep 30 12:36:48 crc kubenswrapper[5002]: I0930 12:36:48.942272 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" 
event={"ID":"14d6a446-4f72-417e-8bdd-af837eda2e70","Type":"ContainerDied","Data":"9cfaf90944d3681da676e69ecb8c3719aecc5ffc5074ac818734df38142da6e2"} Sep 30 12:36:48 crc kubenswrapper[5002]: I0930 12:36:48.942247 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="cc38060e-b831-4300-9f91-e62f1075da79" containerName="glance-httpd" containerID="cri-o://9d185eb5745a75883dce234f0ddd099b8145cb59e56eb83ae3b7ff72f482cf28" gracePeriod=30 Sep 30 12:36:49 crc kubenswrapper[5002]: I0930 12:36:49.960849 5002 generic.go:334] "Generic (PLEG): container finished" podID="cc38060e-b831-4300-9f91-e62f1075da79" containerID="9d185eb5745a75883dce234f0ddd099b8145cb59e56eb83ae3b7ff72f482cf28" exitCode=0 Sep 30 12:36:49 crc kubenswrapper[5002]: I0930 12:36:49.961122 5002 generic.go:334] "Generic (PLEG): container finished" podID="cc38060e-b831-4300-9f91-e62f1075da79" containerID="0b4707074aebf2d025ef4b985dd07c9692750ee98e7f6a36594735de1f63e364" exitCode=143 Sep 30 12:36:49 crc kubenswrapper[5002]: I0930 12:36:49.960928 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"cc38060e-b831-4300-9f91-e62f1075da79","Type":"ContainerDied","Data":"9d185eb5745a75883dce234f0ddd099b8145cb59e56eb83ae3b7ff72f482cf28"} Sep 30 12:36:49 crc kubenswrapper[5002]: I0930 12:36:49.961293 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"cc38060e-b831-4300-9f91-e62f1075da79","Type":"ContainerDied","Data":"0b4707074aebf2d025ef4b985dd07c9692750ee98e7f6a36594735de1f63e364"} Sep 30 12:36:53 crc kubenswrapper[5002]: I0930 12:36:53.156218 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-8b5c85b87-frwpd" Sep 30 12:36:53 crc kubenswrapper[5002]: I0930 12:36:53.204800 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-77585f5f8c-6kcs9"] Sep 30 12:36:53 crc kubenswrapper[5002]: I0930 12:36:53.205002 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-77585f5f8c-6kcs9" podUID="cd808dd7-6b41-4208-8f6e-c54bea4d22c5" containerName="dnsmasq-dns" containerID="cri-o://7258208ab5a087fe79abec802c051a0fafc3d3516365918b10268067678c5af2" gracePeriod=10 Sep 30 12:36:53 crc kubenswrapper[5002]: I0930 12:36:53.503736 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-7cf8dd6b55-z9psc"] Sep 30 12:36:53 crc kubenswrapper[5002]: I0930 12:36:53.549339 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-f8777b74-fpwh2"] Sep 30 12:36:53 crc kubenswrapper[5002]: I0930 12:36:53.553209 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-f8777b74-fpwh2" Sep 30 12:36:53 crc kubenswrapper[5002]: I0930 12:36:53.558637 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-horizon-svc" Sep 30 12:36:53 crc kubenswrapper[5002]: I0930 12:36:53.573721 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-f8777b74-fpwh2"] Sep 30 12:36:53 crc kubenswrapper[5002]: I0930 12:36:53.663205 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-56d859b67f-fr8wt"] Sep 30 12:36:53 crc kubenswrapper[5002]: I0930 12:36:53.670864 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fd4d1d88-c894-496a-b2ee-00bf80fa2415-logs\") pod \"horizon-f8777b74-fpwh2\" (UID: \"fd4d1d88-c894-496a-b2ee-00bf80fa2415\") " pod="openstack/horizon-f8777b74-fpwh2" Sep 30 12:36:53 crc kubenswrapper[5002]: I0930 12:36:53.670994 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/fd4d1d88-c894-496a-b2ee-00bf80fa2415-config-data\") pod \"horizon-f8777b74-fpwh2\" (UID: \"fd4d1d88-c894-496a-b2ee-00bf80fa2415\") " pod="openstack/horizon-f8777b74-fpwh2" Sep 30 12:36:53 crc kubenswrapper[5002]: I0930 12:36:53.671036 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/fd4d1d88-c894-496a-b2ee-00bf80fa2415-horizon-tls-certs\") pod \"horizon-f8777b74-fpwh2\" (UID: \"fd4d1d88-c894-496a-b2ee-00bf80fa2415\") " pod="openstack/horizon-f8777b74-fpwh2" Sep 30 12:36:53 crc kubenswrapper[5002]: I0930 12:36:53.671258 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hlzft\" (UniqueName: \"kubernetes.io/projected/fd4d1d88-c894-496a-b2ee-00bf80fa2415-kube-api-access-hlzft\") pod \"horizon-f8777b74-fpwh2\" (UID: \"fd4d1d88-c894-496a-b2ee-00bf80fa2415\") " pod="openstack/horizon-f8777b74-fpwh2" Sep 30 12:36:53 crc kubenswrapper[5002]: I0930 12:36:53.671312 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/fd4d1d88-c894-496a-b2ee-00bf80fa2415-horizon-secret-key\") pod \"horizon-f8777b74-fpwh2\" (UID: \"fd4d1d88-c894-496a-b2ee-00bf80fa2415\") " pod="openstack/horizon-f8777b74-fpwh2" Sep 30 12:36:53 crc kubenswrapper[5002]: I0930 12:36:53.671334 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/fd4d1d88-c894-496a-b2ee-00bf80fa2415-scripts\") pod \"horizon-f8777b74-fpwh2\" (UID: \"fd4d1d88-c894-496a-b2ee-00bf80fa2415\") " pod="openstack/horizon-f8777b74-fpwh2" Sep 30 12:36:53 crc kubenswrapper[5002]: I0930 12:36:53.671360 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fd4d1d88-c894-496a-b2ee-00bf80fa2415-combined-ca-bundle\") pod \"horizon-f8777b74-fpwh2\" (UID: \"fd4d1d88-c894-496a-b2ee-00bf80fa2415\") " pod="openstack/horizon-f8777b74-fpwh2" Sep 30 12:36:53 crc kubenswrapper[5002]: I0930 12:36:53.696884 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-66c799f4f6-jprmr"] Sep 30 12:36:53 crc kubenswrapper[5002]: I0930 12:36:53.698338 5002 util.go:30] "No sandbox for 
Sep 30 12:36:53 crc kubenswrapper[5002]: I0930 12:36:53.720137 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-66c799f4f6-jprmr"]
Sep 30 12:36:53 crc kubenswrapper[5002]: I0930 12:36:53.773457 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fd4d1d88-c894-496a-b2ee-00bf80fa2415-logs\") pod \"horizon-f8777b74-fpwh2\" (UID: \"fd4d1d88-c894-496a-b2ee-00bf80fa2415\") " pod="openstack/horizon-f8777b74-fpwh2"
Sep 30 12:36:53 crc kubenswrapper[5002]: I0930 12:36:53.773575 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/fd4d1d88-c894-496a-b2ee-00bf80fa2415-config-data\") pod \"horizon-f8777b74-fpwh2\" (UID: \"fd4d1d88-c894-496a-b2ee-00bf80fa2415\") " pod="openstack/horizon-f8777b74-fpwh2"
Sep 30 12:36:53 crc kubenswrapper[5002]: I0930 12:36:53.773602 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/fd4d1d88-c894-496a-b2ee-00bf80fa2415-horizon-tls-certs\") pod \"horizon-f8777b74-fpwh2\" (UID: \"fd4d1d88-c894-496a-b2ee-00bf80fa2415\") " pod="openstack/horizon-f8777b74-fpwh2"
Sep 30 12:36:53 crc kubenswrapper[5002]: I0930 12:36:53.773724 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hlzft\" (UniqueName: \"kubernetes.io/projected/fd4d1d88-c894-496a-b2ee-00bf80fa2415-kube-api-access-hlzft\") pod \"horizon-f8777b74-fpwh2\" (UID: \"fd4d1d88-c894-496a-b2ee-00bf80fa2415\") " pod="openstack/horizon-f8777b74-fpwh2"
Sep 30 12:36:53 crc kubenswrapper[5002]: I0930 12:36:53.773754 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cdrtg\" (UniqueName: \"kubernetes.io/projected/7c12a4dd-a3df-4106-ab48-b628b89b3277-kube-api-access-cdrtg\") pod \"horizon-66c799f4f6-jprmr\" (UID: \"7c12a4dd-a3df-4106-ab48-b628b89b3277\") " pod="openstack/horizon-66c799f4f6-jprmr"
Sep 30 12:36:53 crc kubenswrapper[5002]: I0930 12:36:53.773788 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/7c12a4dd-a3df-4106-ab48-b628b89b3277-config-data\") pod \"horizon-66c799f4f6-jprmr\" (UID: \"7c12a4dd-a3df-4106-ab48-b628b89b3277\") " pod="openstack/horizon-66c799f4f6-jprmr"
Sep 30 12:36:53 crc kubenswrapper[5002]: I0930 12:36:53.773815 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/fd4d1d88-c894-496a-b2ee-00bf80fa2415-horizon-secret-key\") pod \"horizon-f8777b74-fpwh2\" (UID: \"fd4d1d88-c894-496a-b2ee-00bf80fa2415\") " pod="openstack/horizon-f8777b74-fpwh2"
Sep 30 12:36:53 crc kubenswrapper[5002]: I0930 12:36:53.773838 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/7c12a4dd-a3df-4106-ab48-b628b89b3277-horizon-secret-key\") pod \"horizon-66c799f4f6-jprmr\" (UID: \"7c12a4dd-a3df-4106-ab48-b628b89b3277\") " pod="openstack/horizon-66c799f4f6-jprmr"
Sep 30 12:36:53 crc kubenswrapper[5002]: I0930 12:36:53.773862 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/fd4d1d88-c894-496a-b2ee-00bf80fa2415-scripts\") pod \"horizon-f8777b74-fpwh2\" (UID: \"fd4d1d88-c894-496a-b2ee-00bf80fa2415\") " pod="openstack/horizon-f8777b74-fpwh2"
Sep 30 12:36:53 crc kubenswrapper[5002]: I0930 12:36:53.773884 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/7c12a4dd-a3df-4106-ab48-b628b89b3277-horizon-tls-certs\") pod \"horizon-66c799f4f6-jprmr\" (UID: \"7c12a4dd-a3df-4106-ab48-b628b89b3277\") " pod="openstack/horizon-66c799f4f6-jprmr"
Sep 30 12:36:53 crc kubenswrapper[5002]: I0930 12:36:53.773887 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fd4d1d88-c894-496a-b2ee-00bf80fa2415-logs\") pod \"horizon-f8777b74-fpwh2\" (UID: \"fd4d1d88-c894-496a-b2ee-00bf80fa2415\") " pod="openstack/horizon-f8777b74-fpwh2"
Sep 30 12:36:53 crc kubenswrapper[5002]: I0930 12:36:53.773911 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fd4d1d88-c894-496a-b2ee-00bf80fa2415-combined-ca-bundle\") pod \"horizon-f8777b74-fpwh2\" (UID: \"fd4d1d88-c894-496a-b2ee-00bf80fa2415\") " pod="openstack/horizon-f8777b74-fpwh2"
Sep 30 12:36:53 crc kubenswrapper[5002]: I0930 12:36:53.773943 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/7c12a4dd-a3df-4106-ab48-b628b89b3277-scripts\") pod \"horizon-66c799f4f6-jprmr\" (UID: \"7c12a4dd-a3df-4106-ab48-b628b89b3277\") " pod="openstack/horizon-66c799f4f6-jprmr"
Sep 30 12:36:53 crc kubenswrapper[5002]: I0930 12:36:53.773978 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7c12a4dd-a3df-4106-ab48-b628b89b3277-combined-ca-bundle\") pod \"horizon-66c799f4f6-jprmr\" (UID: \"7c12a4dd-a3df-4106-ab48-b628b89b3277\") " pod="openstack/horizon-66c799f4f6-jprmr"
Sep 30 12:36:53 crc kubenswrapper[5002]: I0930 12:36:53.774030 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7c12a4dd-a3df-4106-ab48-b628b89b3277-logs\") pod \"horizon-66c799f4f6-jprmr\" (UID: \"7c12a4dd-a3df-4106-ab48-b628b89b3277\") " pod="openstack/horizon-66c799f4f6-jprmr"
Sep 30 12:36:53 crc kubenswrapper[5002]: I0930 12:36:53.774673 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/fd4d1d88-c894-496a-b2ee-00bf80fa2415-scripts\") pod \"horizon-f8777b74-fpwh2\" (UID: \"fd4d1d88-c894-496a-b2ee-00bf80fa2415\") " pod="openstack/horizon-f8777b74-fpwh2"
Sep 30 12:36:53 crc kubenswrapper[5002]: I0930 12:36:53.775000 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/fd4d1d88-c894-496a-b2ee-00bf80fa2415-config-data\") pod \"horizon-f8777b74-fpwh2\" (UID: \"fd4d1d88-c894-496a-b2ee-00bf80fa2415\") " pod="openstack/horizon-f8777b74-fpwh2"
Sep 30 12:36:53 crc kubenswrapper[5002]: I0930 12:36:53.782197 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/fd4d1d88-c894-496a-b2ee-00bf80fa2415-horizon-secret-key\") pod \"horizon-f8777b74-fpwh2\" (UID: \"fd4d1d88-c894-496a-b2ee-00bf80fa2415\") " pod="openstack/horizon-f8777b74-fpwh2"
Sep 30 12:36:53 crc kubenswrapper[5002]: I0930 12:36:53.782613 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fd4d1d88-c894-496a-b2ee-00bf80fa2415-combined-ca-bundle\") pod \"horizon-f8777b74-fpwh2\" (UID: \"fd4d1d88-c894-496a-b2ee-00bf80fa2415\") " pod="openstack/horizon-f8777b74-fpwh2"
Sep 30 12:36:53 crc kubenswrapper[5002]: I0930 12:36:53.785908 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/fd4d1d88-c894-496a-b2ee-00bf80fa2415-horizon-tls-certs\") pod \"horizon-f8777b74-fpwh2\" (UID: \"fd4d1d88-c894-496a-b2ee-00bf80fa2415\") " pod="openstack/horizon-f8777b74-fpwh2"
Sep 30 12:36:53 crc kubenswrapper[5002]: I0930 12:36:53.793980 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hlzft\" (UniqueName: \"kubernetes.io/projected/fd4d1d88-c894-496a-b2ee-00bf80fa2415-kube-api-access-hlzft\") pod \"horizon-f8777b74-fpwh2\" (UID: \"fd4d1d88-c894-496a-b2ee-00bf80fa2415\") " pod="openstack/horizon-f8777b74-fpwh2"
Sep 30 12:36:53 crc kubenswrapper[5002]: I0930 12:36:53.875887 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7c12a4dd-a3df-4106-ab48-b628b89b3277-logs\") pod \"horizon-66c799f4f6-jprmr\" (UID: \"7c12a4dd-a3df-4106-ab48-b628b89b3277\") " pod="openstack/horizon-66c799f4f6-jprmr"
Sep 30 12:36:53 crc kubenswrapper[5002]: I0930 12:36:53.876007 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cdrtg\" (UniqueName: \"kubernetes.io/projected/7c12a4dd-a3df-4106-ab48-b628b89b3277-kube-api-access-cdrtg\") pod \"horizon-66c799f4f6-jprmr\" (UID: \"7c12a4dd-a3df-4106-ab48-b628b89b3277\") " pod="openstack/horizon-66c799f4f6-jprmr"
Sep 30 12:36:53 crc kubenswrapper[5002]: I0930 12:36:53.876032 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/7c12a4dd-a3df-4106-ab48-b628b89b3277-config-data\") pod \"horizon-66c799f4f6-jprmr\" (UID: \"7c12a4dd-a3df-4106-ab48-b628b89b3277\") " pod="openstack/horizon-66c799f4f6-jprmr"
Sep 30 12:36:53 crc kubenswrapper[5002]: I0930 12:36:53.876058 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/7c12a4dd-a3df-4106-ab48-b628b89b3277-horizon-secret-key\") pod \"horizon-66c799f4f6-jprmr\" (UID: \"7c12a4dd-a3df-4106-ab48-b628b89b3277\") " pod="openstack/horizon-66c799f4f6-jprmr"
Sep 30 12:36:53 crc kubenswrapper[5002]: I0930 12:36:53.876076 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/7c12a4dd-a3df-4106-ab48-b628b89b3277-horizon-tls-certs\") pod \"horizon-66c799f4f6-jprmr\" (UID: \"7c12a4dd-a3df-4106-ab48-b628b89b3277\") " pod="openstack/horizon-66c799f4f6-jprmr"
Sep 30 12:36:53 crc kubenswrapper[5002]: I0930 12:36:53.876104 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/7c12a4dd-a3df-4106-ab48-b628b89b3277-scripts\") pod \"horizon-66c799f4f6-jprmr\" (UID: \"7c12a4dd-a3df-4106-ab48-b628b89b3277\") " pod="openstack/horizon-66c799f4f6-jprmr"
Sep 30 12:36:53 crc kubenswrapper[5002]: I0930 12:36:53.876131 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7c12a4dd-a3df-4106-ab48-b628b89b3277-combined-ca-bundle\") pod \"horizon-66c799f4f6-jprmr\" (UID: \"7c12a4dd-a3df-4106-ab48-b628b89b3277\") " pod="openstack/horizon-66c799f4f6-jprmr"
"operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7c12a4dd-a3df-4106-ab48-b628b89b3277-combined-ca-bundle\") pod \"horizon-66c799f4f6-jprmr\" (UID: \"7c12a4dd-a3df-4106-ab48-b628b89b3277\") " pod="openstack/horizon-66c799f4f6-jprmr" Sep 30 12:36:53 crc kubenswrapper[5002]: I0930 12:36:53.876820 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7c12a4dd-a3df-4106-ab48-b628b89b3277-logs\") pod \"horizon-66c799f4f6-jprmr\" (UID: \"7c12a4dd-a3df-4106-ab48-b628b89b3277\") " pod="openstack/horizon-66c799f4f6-jprmr" Sep 30 12:36:53 crc kubenswrapper[5002]: I0930 12:36:53.876990 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/7c12a4dd-a3df-4106-ab48-b628b89b3277-scripts\") pod \"horizon-66c799f4f6-jprmr\" (UID: \"7c12a4dd-a3df-4106-ab48-b628b89b3277\") " pod="openstack/horizon-66c799f4f6-jprmr" Sep 30 12:36:53 crc kubenswrapper[5002]: I0930 12:36:53.877085 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/7c12a4dd-a3df-4106-ab48-b628b89b3277-config-data\") pod \"horizon-66c799f4f6-jprmr\" (UID: \"7c12a4dd-a3df-4106-ab48-b628b89b3277\") " pod="openstack/horizon-66c799f4f6-jprmr" Sep 30 12:36:53 crc kubenswrapper[5002]: I0930 12:36:53.879074 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-f8777b74-fpwh2" Sep 30 12:36:53 crc kubenswrapper[5002]: I0930 12:36:53.879700 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/7c12a4dd-a3df-4106-ab48-b628b89b3277-horizon-tls-certs\") pod \"horizon-66c799f4f6-jprmr\" (UID: \"7c12a4dd-a3df-4106-ab48-b628b89b3277\") " pod="openstack/horizon-66c799f4f6-jprmr" Sep 30 12:36:53 crc kubenswrapper[5002]: I0930 12:36:53.879933 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7c12a4dd-a3df-4106-ab48-b628b89b3277-combined-ca-bundle\") pod \"horizon-66c799f4f6-jprmr\" (UID: \"7c12a4dd-a3df-4106-ab48-b628b89b3277\") " pod="openstack/horizon-66c799f4f6-jprmr" Sep 30 12:36:53 crc kubenswrapper[5002]: I0930 12:36:53.894867 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/7c12a4dd-a3df-4106-ab48-b628b89b3277-horizon-secret-key\") pod \"horizon-66c799f4f6-jprmr\" (UID: \"7c12a4dd-a3df-4106-ab48-b628b89b3277\") " pod="openstack/horizon-66c799f4f6-jprmr" Sep 30 12:36:53 crc kubenswrapper[5002]: I0930 12:36:53.897888 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cdrtg\" (UniqueName: \"kubernetes.io/projected/7c12a4dd-a3df-4106-ab48-b628b89b3277-kube-api-access-cdrtg\") pod \"horizon-66c799f4f6-jprmr\" (UID: \"7c12a4dd-a3df-4106-ab48-b628b89b3277\") " pod="openstack/horizon-66c799f4f6-jprmr" Sep 30 12:36:54 crc kubenswrapper[5002]: I0930 12:36:54.006925 5002 generic.go:334] "Generic (PLEG): container finished" podID="cd808dd7-6b41-4208-8f6e-c54bea4d22c5" containerID="7258208ab5a087fe79abec802c051a0fafc3d3516365918b10268067678c5af2" exitCode=0 Sep 30 12:36:54 crc kubenswrapper[5002]: I0930 12:36:54.006973 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-77585f5f8c-6kcs9" 
event={"ID":"cd808dd7-6b41-4208-8f6e-c54bea4d22c5","Type":"ContainerDied","Data":"7258208ab5a087fe79abec802c051a0fafc3d3516365918b10268067678c5af2"} Sep 30 12:36:54 crc kubenswrapper[5002]: I0930 12:36:54.022666 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-66c799f4f6-jprmr" Sep 30 12:36:56 crc kubenswrapper[5002]: I0930 12:36:56.989240 5002 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-77585f5f8c-6kcs9" podUID="cd808dd7-6b41-4208-8f6e-c54bea4d22c5" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.124:5353: connect: connection refused" Sep 30 12:37:00 crc kubenswrapper[5002]: I0930 12:37:00.935449 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-tt2dz" Sep 30 12:37:00 crc kubenswrapper[5002]: I0930 12:37:00.999181 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b8d82577-a47f-40e2-9986-5883fea9ddf5-config-data\") pod \"b8d82577-a47f-40e2-9986-5883fea9ddf5\" (UID: \"b8d82577-a47f-40e2-9986-5883fea9ddf5\") " Sep 30 12:37:00 crc kubenswrapper[5002]: I0930 12:37:00.999272 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mmgms\" (UniqueName: \"kubernetes.io/projected/b8d82577-a47f-40e2-9986-5883fea9ddf5-kube-api-access-mmgms\") pod \"b8d82577-a47f-40e2-9986-5883fea9ddf5\" (UID: \"b8d82577-a47f-40e2-9986-5883fea9ddf5\") " Sep 30 12:37:00 crc kubenswrapper[5002]: I0930 12:37:00.999314 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/b8d82577-a47f-40e2-9986-5883fea9ddf5-fernet-keys\") pod \"b8d82577-a47f-40e2-9986-5883fea9ddf5\" (UID: \"b8d82577-a47f-40e2-9986-5883fea9ddf5\") " Sep 30 12:37:00 crc kubenswrapper[5002]: I0930 12:37:00.999431 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b8d82577-a47f-40e2-9986-5883fea9ddf5-combined-ca-bundle\") pod \"b8d82577-a47f-40e2-9986-5883fea9ddf5\" (UID: \"b8d82577-a47f-40e2-9986-5883fea9ddf5\") " Sep 30 12:37:00 crc kubenswrapper[5002]: I0930 12:37:00.999526 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/b8d82577-a47f-40e2-9986-5883fea9ddf5-credential-keys\") pod \"b8d82577-a47f-40e2-9986-5883fea9ddf5\" (UID: \"b8d82577-a47f-40e2-9986-5883fea9ddf5\") " Sep 30 12:37:00 crc kubenswrapper[5002]: I0930 12:37:00.999569 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b8d82577-a47f-40e2-9986-5883fea9ddf5-scripts\") pod \"b8d82577-a47f-40e2-9986-5883fea9ddf5\" (UID: \"b8d82577-a47f-40e2-9986-5883fea9ddf5\") " Sep 30 12:37:01 crc kubenswrapper[5002]: I0930 12:37:01.005443 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b8d82577-a47f-40e2-9986-5883fea9ddf5-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "b8d82577-a47f-40e2-9986-5883fea9ddf5" (UID: "b8d82577-a47f-40e2-9986-5883fea9ddf5"). InnerVolumeSpecName "credential-keys". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:37:01 crc kubenswrapper[5002]: I0930 12:37:01.005651 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b8d82577-a47f-40e2-9986-5883fea9ddf5-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "b8d82577-a47f-40e2-9986-5883fea9ddf5" (UID: "b8d82577-a47f-40e2-9986-5883fea9ddf5"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:37:01 crc kubenswrapper[5002]: I0930 12:37:01.005702 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b8d82577-a47f-40e2-9986-5883fea9ddf5-kube-api-access-mmgms" (OuterVolumeSpecName: "kube-api-access-mmgms") pod "b8d82577-a47f-40e2-9986-5883fea9ddf5" (UID: "b8d82577-a47f-40e2-9986-5883fea9ddf5"). InnerVolumeSpecName "kube-api-access-mmgms". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 12:37:01 crc kubenswrapper[5002]: I0930 12:37:01.016637 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b8d82577-a47f-40e2-9986-5883fea9ddf5-scripts" (OuterVolumeSpecName: "scripts") pod "b8d82577-a47f-40e2-9986-5883fea9ddf5" (UID: "b8d82577-a47f-40e2-9986-5883fea9ddf5"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:37:01 crc kubenswrapper[5002]: I0930 12:37:01.027030 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b8d82577-a47f-40e2-9986-5883fea9ddf5-config-data" (OuterVolumeSpecName: "config-data") pod "b8d82577-a47f-40e2-9986-5883fea9ddf5" (UID: "b8d82577-a47f-40e2-9986-5883fea9ddf5"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:37:01 crc kubenswrapper[5002]: I0930 12:37:01.040828 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b8d82577-a47f-40e2-9986-5883fea9ddf5-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b8d82577-a47f-40e2-9986-5883fea9ddf5" (UID: "b8d82577-a47f-40e2-9986-5883fea9ddf5"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:37:01 crc kubenswrapper[5002]: I0930 12:37:01.068957 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-tt2dz" event={"ID":"b8d82577-a47f-40e2-9986-5883fea9ddf5","Type":"ContainerDied","Data":"942a543bf0d7664bbc16cd0c2e9b23e1dd807af774f2a94734acc7159cf5166e"} Sep 30 12:37:01 crc kubenswrapper[5002]: I0930 12:37:01.069011 5002 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="942a543bf0d7664bbc16cd0c2e9b23e1dd807af774f2a94734acc7159cf5166e" Sep 30 12:37:01 crc kubenswrapper[5002]: I0930 12:37:01.069158 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-tt2dz" Sep 30 12:37:01 crc kubenswrapper[5002]: I0930 12:37:01.102140 5002 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b8d82577-a47f-40e2-9986-5883fea9ddf5-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 12:37:01 crc kubenswrapper[5002]: I0930 12:37:01.102209 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mmgms\" (UniqueName: \"kubernetes.io/projected/b8d82577-a47f-40e2-9986-5883fea9ddf5-kube-api-access-mmgms\") on node \"crc\" DevicePath \"\"" Sep 30 12:37:01 crc kubenswrapper[5002]: I0930 12:37:01.102241 5002 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/b8d82577-a47f-40e2-9986-5883fea9ddf5-fernet-keys\") on node \"crc\" DevicePath \"\"" Sep 30 12:37:01 crc kubenswrapper[5002]: I0930 12:37:01.102262 5002 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b8d82577-a47f-40e2-9986-5883fea9ddf5-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 12:37:01 crc kubenswrapper[5002]: I0930 12:37:01.102282 5002 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/b8d82577-a47f-40e2-9986-5883fea9ddf5-credential-keys\") on node \"crc\" DevicePath \"\"" Sep 30 12:37:01 crc kubenswrapper[5002]: I0930 12:37:01.102300 5002 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b8d82577-a47f-40e2-9986-5883fea9ddf5-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 12:37:01 crc kubenswrapper[5002]: E0930 12:37:01.379892 5002 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-horizon:current-podified" Sep 30 12:37:01 crc kubenswrapper[5002]: E0930 12:37:01.380204 5002 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:horizon-log,Image:quay.io/podified-antelope-centos9/openstack-horizon:current-podified,Command:[/bin/bash],Args:[-c tail -n+1 -F 
/var/log/horizon/horizon.log],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n654h696h6bh64h58ch646h684h68fhbfh588h675hb4hd7h654h689h564h5fbh98h5bdh5ch5b7h4h8h656h578h6dh5b9hf8h78hd4h694h5d8q,ValueFrom:nil,},EnvVar{Name:ENABLE_DESIGNATE,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_HEAT,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_IRONIC,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_MANILA,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_OCTAVIA,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_WATCHER,Value:no,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},EnvVar{Name:UNPACK_THEME,Value:true,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:logs,ReadOnly:false,MountPath:/var/log/horizon,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-blm2s,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*48,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*true,RunAsGroup:*42400,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod horizon-7cf8dd6b55-z9psc_openstack(7d2bf63f-4cce-402b-b774-e77dc172959c): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Sep 30 12:37:01 crc kubenswrapper[5002]: E0930 12:37:01.383151 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"horizon-log\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\", failed to \"StartContainer\" for \"horizon\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-horizon:current-podified\\\"\"]" pod="openstack/horizon-7cf8dd6b55-z9psc" podUID="7d2bf63f-4cce-402b-b774-e77dc172959c" Sep 30 12:37:01 crc kubenswrapper[5002]: I0930 12:37:01.988690 5002 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-77585f5f8c-6kcs9" podUID="cd808dd7-6b41-4208-8f6e-c54bea4d22c5" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.124:5353: connect: connection refused" Sep 30 12:37:02 crc kubenswrapper[5002]: I0930 12:37:02.033261 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-tt2dz"] Sep 30 12:37:02 crc kubenswrapper[5002]: I0930 12:37:02.040598 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-tt2dz"] Sep 30 12:37:02 crc kubenswrapper[5002]: I0930 12:37:02.125661 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-88rff"] Sep 30 12:37:02 crc kubenswrapper[5002]: E0930 12:37:02.126146 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b8d82577-a47f-40e2-9986-5883fea9ddf5" containerName="keystone-bootstrap" Sep 30 12:37:02 crc kubenswrapper[5002]: I0930 12:37:02.126167 5002 
state_mem.go:107] "Deleted CPUSet assignment" podUID="b8d82577-a47f-40e2-9986-5883fea9ddf5" containerName="keystone-bootstrap" Sep 30 12:37:02 crc kubenswrapper[5002]: I0930 12:37:02.126564 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="b8d82577-a47f-40e2-9986-5883fea9ddf5" containerName="keystone-bootstrap" Sep 30 12:37:02 crc kubenswrapper[5002]: I0930 12:37:02.127730 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-88rff" Sep 30 12:37:02 crc kubenswrapper[5002]: I0930 12:37:02.130077 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-n9gzw" Sep 30 12:37:02 crc kubenswrapper[5002]: I0930 12:37:02.130267 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Sep 30 12:37:02 crc kubenswrapper[5002]: I0930 12:37:02.131681 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Sep 30 12:37:02 crc kubenswrapper[5002]: I0930 12:37:02.131954 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Sep 30 12:37:02 crc kubenswrapper[5002]: I0930 12:37:02.138556 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-88rff"] Sep 30 12:37:02 crc kubenswrapper[5002]: I0930 12:37:02.221937 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/418c45be-fdf8-41be-899d-a75e1ab0acef-config-data\") pod \"keystone-bootstrap-88rff\" (UID: \"418c45be-fdf8-41be-899d-a75e1ab0acef\") " pod="openstack/keystone-bootstrap-88rff" Sep 30 12:37:02 crc kubenswrapper[5002]: I0930 12:37:02.222109 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7xznn\" (UniqueName: \"kubernetes.io/projected/418c45be-fdf8-41be-899d-a75e1ab0acef-kube-api-access-7xznn\") pod \"keystone-bootstrap-88rff\" (UID: \"418c45be-fdf8-41be-899d-a75e1ab0acef\") " pod="openstack/keystone-bootstrap-88rff" Sep 30 12:37:02 crc kubenswrapper[5002]: I0930 12:37:02.222208 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/418c45be-fdf8-41be-899d-a75e1ab0acef-fernet-keys\") pod \"keystone-bootstrap-88rff\" (UID: \"418c45be-fdf8-41be-899d-a75e1ab0acef\") " pod="openstack/keystone-bootstrap-88rff" Sep 30 12:37:02 crc kubenswrapper[5002]: I0930 12:37:02.222347 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/418c45be-fdf8-41be-899d-a75e1ab0acef-combined-ca-bundle\") pod \"keystone-bootstrap-88rff\" (UID: \"418c45be-fdf8-41be-899d-a75e1ab0acef\") " pod="openstack/keystone-bootstrap-88rff" Sep 30 12:37:02 crc kubenswrapper[5002]: I0930 12:37:02.222396 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/418c45be-fdf8-41be-899d-a75e1ab0acef-credential-keys\") pod \"keystone-bootstrap-88rff\" (UID: \"418c45be-fdf8-41be-899d-a75e1ab0acef\") " pod="openstack/keystone-bootstrap-88rff" Sep 30 12:37:02 crc kubenswrapper[5002]: I0930 12:37:02.222526 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/418c45be-fdf8-41be-899d-a75e1ab0acef-scripts\") pod \"keystone-bootstrap-88rff\" (UID: \"418c45be-fdf8-41be-899d-a75e1ab0acef\") " pod="openstack/keystone-bootstrap-88rff" Sep 30 12:37:02 crc kubenswrapper[5002]: I0930 12:37:02.324344 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/418c45be-fdf8-41be-899d-a75e1ab0acef-combined-ca-bundle\") pod \"keystone-bootstrap-88rff\" (UID: \"418c45be-fdf8-41be-899d-a75e1ab0acef\") " pod="openstack/keystone-bootstrap-88rff" Sep 30 12:37:02 crc kubenswrapper[5002]: I0930 12:37:02.324416 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/418c45be-fdf8-41be-899d-a75e1ab0acef-credential-keys\") pod \"keystone-bootstrap-88rff\" (UID: \"418c45be-fdf8-41be-899d-a75e1ab0acef\") " pod="openstack/keystone-bootstrap-88rff" Sep 30 12:37:02 crc kubenswrapper[5002]: I0930 12:37:02.324455 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/418c45be-fdf8-41be-899d-a75e1ab0acef-scripts\") pod \"keystone-bootstrap-88rff\" (UID: \"418c45be-fdf8-41be-899d-a75e1ab0acef\") " pod="openstack/keystone-bootstrap-88rff" Sep 30 12:37:02 crc kubenswrapper[5002]: I0930 12:37:02.324549 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/418c45be-fdf8-41be-899d-a75e1ab0acef-config-data\") pod \"keystone-bootstrap-88rff\" (UID: \"418c45be-fdf8-41be-899d-a75e1ab0acef\") " pod="openstack/keystone-bootstrap-88rff" Sep 30 12:37:02 crc kubenswrapper[5002]: I0930 12:37:02.324599 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7xznn\" (UniqueName: \"kubernetes.io/projected/418c45be-fdf8-41be-899d-a75e1ab0acef-kube-api-access-7xznn\") pod \"keystone-bootstrap-88rff\" (UID: \"418c45be-fdf8-41be-899d-a75e1ab0acef\") " pod="openstack/keystone-bootstrap-88rff" Sep 30 12:37:02 crc kubenswrapper[5002]: I0930 12:37:02.324627 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/418c45be-fdf8-41be-899d-a75e1ab0acef-fernet-keys\") pod \"keystone-bootstrap-88rff\" (UID: \"418c45be-fdf8-41be-899d-a75e1ab0acef\") " pod="openstack/keystone-bootstrap-88rff" Sep 30 12:37:02 crc kubenswrapper[5002]: I0930 12:37:02.332967 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/418c45be-fdf8-41be-899d-a75e1ab0acef-combined-ca-bundle\") pod \"keystone-bootstrap-88rff\" (UID: \"418c45be-fdf8-41be-899d-a75e1ab0acef\") " pod="openstack/keystone-bootstrap-88rff" Sep 30 12:37:02 crc kubenswrapper[5002]: I0930 12:37:02.334315 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/418c45be-fdf8-41be-899d-a75e1ab0acef-credential-keys\") pod \"keystone-bootstrap-88rff\" (UID: \"418c45be-fdf8-41be-899d-a75e1ab0acef\") " pod="openstack/keystone-bootstrap-88rff" Sep 30 12:37:02 crc kubenswrapper[5002]: I0930 12:37:02.334508 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/418c45be-fdf8-41be-899d-a75e1ab0acef-scripts\") pod \"keystone-bootstrap-88rff\" (UID: \"418c45be-fdf8-41be-899d-a75e1ab0acef\") " 
pod="openstack/keystone-bootstrap-88rff" Sep 30 12:37:02 crc kubenswrapper[5002]: I0930 12:37:02.343296 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/418c45be-fdf8-41be-899d-a75e1ab0acef-fernet-keys\") pod \"keystone-bootstrap-88rff\" (UID: \"418c45be-fdf8-41be-899d-a75e1ab0acef\") " pod="openstack/keystone-bootstrap-88rff" Sep 30 12:37:02 crc kubenswrapper[5002]: I0930 12:37:02.345148 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/418c45be-fdf8-41be-899d-a75e1ab0acef-config-data\") pod \"keystone-bootstrap-88rff\" (UID: \"418c45be-fdf8-41be-899d-a75e1ab0acef\") " pod="openstack/keystone-bootstrap-88rff" Sep 30 12:37:02 crc kubenswrapper[5002]: I0930 12:37:02.348231 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7xznn\" (UniqueName: \"kubernetes.io/projected/418c45be-fdf8-41be-899d-a75e1ab0acef-kube-api-access-7xznn\") pod \"keystone-bootstrap-88rff\" (UID: \"418c45be-fdf8-41be-899d-a75e1ab0acef\") " pod="openstack/keystone-bootstrap-88rff" Sep 30 12:37:02 crc kubenswrapper[5002]: I0930 12:37:02.524422 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-88rff" Sep 30 12:37:02 crc kubenswrapper[5002]: E0930 12:37:02.658321 5002 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-ceilometer-central:current-podified" Sep 30 12:37:02 crc kubenswrapper[5002]: E0930 12:37:02.658581 5002 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:ceilometer-central-agent,Image:quay.io/podified-antelope-centos9/openstack-ceilometer-central:current-podified,Command:[/bin/bash],Args:[-c /usr/local/bin/kolla_set_configs && /usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n65dh7hf8h557h5cdh55h68dh55dh555hddh5ffh97h7bh5fch5f8h559h648h5cfh59dh686h5fch5c9h5ffh5dfh58hch55ch64dhffh7chcbh68cq,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:scripts,ReadOnly:true,MountPath:/var/lib/openstack/bin,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/openstack/config,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:ceilometer-central-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-v27pl,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[/usr/bin/python3 
/var/lib/openstack/bin/centralhealth.py],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:300,TimeoutSeconds:5,PeriodSeconds:5,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*0,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ceilometer-0_openstack(22178f02-1a64-4a88-a564-fe143875e7df): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Sep 30 12:37:02 crc kubenswrapper[5002]: I0930 12:37:02.688671 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b8d82577-a47f-40e2-9986-5883fea9ddf5" path="/var/lib/kubelet/pods/b8d82577-a47f-40e2-9986-5883fea9ddf5/volumes" Sep 30 12:37:04 crc kubenswrapper[5002]: E0930 12:37:04.597093 5002 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-placement-api:current-podified" Sep 30 12:37:04 crc kubenswrapper[5002]: E0930 12:37:04.597434 5002 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:placement-db-sync,Image:quay.io/podified-antelope-centos9/openstack-placement-api:current-podified,Command:[/bin/bash],Args:[-c 
/usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:true,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:scripts,ReadOnly:true,MountPath:/usr/local/bin/container-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:logs,ReadOnly:false,MountPath:/var/log/placement,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:false,MountPath:/var/lib/openstack/config,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:placement-dbsync-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-9pmcw,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42482,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod placement-db-sync-bnf58_openstack(4fdf0c77-68ae-41ff-b6b5-122baa461b8c): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Sep 30 12:37:04 crc kubenswrapper[5002]: E0930 12:37:04.598654 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"placement-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/placement-db-sync-bnf58" podUID="4fdf0c77-68ae-41ff-b6b5-122baa461b8c" Sep 30 12:37:04 crc kubenswrapper[5002]: I0930 12:37:04.712342 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Sep 30 12:37:04 crc kubenswrapper[5002]: I0930 12:37:04.714100 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Sep 30 12:37:04 crc kubenswrapper[5002]: I0930 12:37:04.757081 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-7cf8dd6b55-z9psc" Sep 30 12:37:04 crc kubenswrapper[5002]: I0930 12:37:04.767092 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/14d6a446-4f72-417e-8bdd-af837eda2e70-httpd-run\") pod \"14d6a446-4f72-417e-8bdd-af837eda2e70\" (UID: \"14d6a446-4f72-417e-8bdd-af837eda2e70\") " Sep 30 12:37:04 crc kubenswrapper[5002]: I0930 12:37:04.767125 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"cc38060e-b831-4300-9f91-e62f1075da79\" (UID: \"cc38060e-b831-4300-9f91-e62f1075da79\") " Sep 30 12:37:04 crc kubenswrapper[5002]: I0930 12:37:04.767158 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cc38060e-b831-4300-9f91-e62f1075da79-config-data\") pod \"cc38060e-b831-4300-9f91-e62f1075da79\" (UID: \"cc38060e-b831-4300-9f91-e62f1075da79\") " Sep 30 12:37:04 crc kubenswrapper[5002]: I0930 12:37:04.767240 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/14d6a446-4f72-417e-8bdd-af837eda2e70-config-data\") pod \"14d6a446-4f72-417e-8bdd-af837eda2e70\" (UID: \"14d6a446-4f72-417e-8bdd-af837eda2e70\") " Sep 30 12:37:04 crc kubenswrapper[5002]: I0930 12:37:04.767302 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/14d6a446-4f72-417e-8bdd-af837eda2e70-combined-ca-bundle\") pod \"14d6a446-4f72-417e-8bdd-af837eda2e70\" (UID: \"14d6a446-4f72-417e-8bdd-af837eda2e70\") " Sep 30 12:37:04 crc kubenswrapper[5002]: I0930 12:37:04.767354 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cc38060e-b831-4300-9f91-e62f1075da79-logs\") pod \"cc38060e-b831-4300-9f91-e62f1075da79\" (UID: \"cc38060e-b831-4300-9f91-e62f1075da79\") " Sep 30 12:37:04 crc kubenswrapper[5002]: I0930 12:37:04.767392 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cc38060e-b831-4300-9f91-e62f1075da79-scripts\") pod \"cc38060e-b831-4300-9f91-e62f1075da79\" (UID: \"cc38060e-b831-4300-9f91-e62f1075da79\") " Sep 30 12:37:04 crc kubenswrapper[5002]: I0930 12:37:04.767440 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"14d6a446-4f72-417e-8bdd-af837eda2e70\" (UID: \"14d6a446-4f72-417e-8bdd-af837eda2e70\") " Sep 30 12:37:04 crc kubenswrapper[5002]: I0930 12:37:04.767583 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cc38060e-b831-4300-9f91-e62f1075da79-combined-ca-bundle\") pod \"cc38060e-b831-4300-9f91-e62f1075da79\" (UID: \"cc38060e-b831-4300-9f91-e62f1075da79\") " Sep 30 12:37:04 crc kubenswrapper[5002]: I0930 12:37:04.767640 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bc7kt\" (UniqueName: \"kubernetes.io/projected/14d6a446-4f72-417e-8bdd-af837eda2e70-kube-api-access-bc7kt\") pod \"14d6a446-4f72-417e-8bdd-af837eda2e70\" (UID: \"14d6a446-4f72-417e-8bdd-af837eda2e70\") " Sep 30 12:37:04 crc kubenswrapper[5002]: I0930 12:37:04.767712 
5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/14d6a446-4f72-417e-8bdd-af837eda2e70-logs\") pod \"14d6a446-4f72-417e-8bdd-af837eda2e70\" (UID: \"14d6a446-4f72-417e-8bdd-af837eda2e70\") " Sep 30 12:37:04 crc kubenswrapper[5002]: I0930 12:37:04.767730 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w98jx\" (UniqueName: \"kubernetes.io/projected/cc38060e-b831-4300-9f91-e62f1075da79-kube-api-access-w98jx\") pod \"cc38060e-b831-4300-9f91-e62f1075da79\" (UID: \"cc38060e-b831-4300-9f91-e62f1075da79\") " Sep 30 12:37:04 crc kubenswrapper[5002]: I0930 12:37:04.767747 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/14d6a446-4f72-417e-8bdd-af837eda2e70-scripts\") pod \"14d6a446-4f72-417e-8bdd-af837eda2e70\" (UID: \"14d6a446-4f72-417e-8bdd-af837eda2e70\") " Sep 30 12:37:04 crc kubenswrapper[5002]: I0930 12:37:04.767791 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/cc38060e-b831-4300-9f91-e62f1075da79-httpd-run\") pod \"cc38060e-b831-4300-9f91-e62f1075da79\" (UID: \"cc38060e-b831-4300-9f91-e62f1075da79\") " Sep 30 12:37:04 crc kubenswrapper[5002]: I0930 12:37:04.767788 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/14d6a446-4f72-417e-8bdd-af837eda2e70-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "14d6a446-4f72-417e-8bdd-af837eda2e70" (UID: "14d6a446-4f72-417e-8bdd-af837eda2e70"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 12:37:04 crc kubenswrapper[5002]: I0930 12:37:04.768119 5002 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/14d6a446-4f72-417e-8bdd-af837eda2e70-httpd-run\") on node \"crc\" DevicePath \"\"" Sep 30 12:37:04 crc kubenswrapper[5002]: I0930 12:37:04.768981 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/14d6a446-4f72-417e-8bdd-af837eda2e70-logs" (OuterVolumeSpecName: "logs") pod "14d6a446-4f72-417e-8bdd-af837eda2e70" (UID: "14d6a446-4f72-417e-8bdd-af837eda2e70"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 12:37:04 crc kubenswrapper[5002]: I0930 12:37:04.775105 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage06-crc" (OuterVolumeSpecName: "glance") pod "14d6a446-4f72-417e-8bdd-af837eda2e70" (UID: "14d6a446-4f72-417e-8bdd-af837eda2e70"). InnerVolumeSpecName "local-storage06-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Sep 30 12:37:04 crc kubenswrapper[5002]: I0930 12:37:04.776625 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cc38060e-b831-4300-9f91-e62f1075da79-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "cc38060e-b831-4300-9f91-e62f1075da79" (UID: "cc38060e-b831-4300-9f91-e62f1075da79"). InnerVolumeSpecName "httpd-run". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 12:37:04 crc kubenswrapper[5002]: I0930 12:37:04.778525 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cc38060e-b831-4300-9f91-e62f1075da79-scripts" (OuterVolumeSpecName: "scripts") pod "cc38060e-b831-4300-9f91-e62f1075da79" (UID: "cc38060e-b831-4300-9f91-e62f1075da79"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:37:04 crc kubenswrapper[5002]: I0930 12:37:04.778773 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cc38060e-b831-4300-9f91-e62f1075da79-logs" (OuterVolumeSpecName: "logs") pod "cc38060e-b831-4300-9f91-e62f1075da79" (UID: "cc38060e-b831-4300-9f91-e62f1075da79"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 12:37:04 crc kubenswrapper[5002]: I0930 12:37:04.783353 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage07-crc" (OuterVolumeSpecName: "glance") pod "cc38060e-b831-4300-9f91-e62f1075da79" (UID: "cc38060e-b831-4300-9f91-e62f1075da79"). InnerVolumeSpecName "local-storage07-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Sep 30 12:37:04 crc kubenswrapper[5002]: E0930 12:37:04.785962 5002 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-horizon:current-podified" Sep 30 12:37:04 crc kubenswrapper[5002]: E0930 12:37:04.786129 5002 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:horizon-log,Image:quay.io/podified-antelope-centos9/openstack-horizon:current-podified,Command:[/bin/bash],Args:[-c tail -n+1 -F 
/var/log/horizon/horizon.log],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n99h59fh66ch59h574hddh568h67fh677h598h579h66fhf7h7fh66ch698h657h59h696h67bh65h89hchb4h58ch698h545h5bh55bh695h664h7bq,ValueFrom:nil,},EnvVar{Name:ENABLE_DESIGNATE,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_HEAT,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_IRONIC,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_MANILA,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_OCTAVIA,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_WATCHER,Value:no,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},EnvVar{Name:UNPACK_THEME,Value:true,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:logs,ReadOnly:false,MountPath:/var/log/horizon,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-ttjhs,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*48,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*true,RunAsGroup:*42400,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod horizon-799d9cbb9f-xj4zl_openstack(28380b30-cf01-44f7-8ec6-b1812a5e8435): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Sep 30 12:37:04 crc kubenswrapper[5002]: I0930 12:37:04.788259 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cc38060e-b831-4300-9f91-e62f1075da79-kube-api-access-w98jx" (OuterVolumeSpecName: "kube-api-access-w98jx") pod "cc38060e-b831-4300-9f91-e62f1075da79" (UID: "cc38060e-b831-4300-9f91-e62f1075da79"). InnerVolumeSpecName "kube-api-access-w98jx". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 12:37:04 crc kubenswrapper[5002]: I0930 12:37:04.788399 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/14d6a446-4f72-417e-8bdd-af837eda2e70-kube-api-access-bc7kt" (OuterVolumeSpecName: "kube-api-access-bc7kt") pod "14d6a446-4f72-417e-8bdd-af837eda2e70" (UID: "14d6a446-4f72-417e-8bdd-af837eda2e70"). InnerVolumeSpecName "kube-api-access-bc7kt". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 12:37:04 crc kubenswrapper[5002]: E0930 12:37:04.788450 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"horizon-log\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\", failed to \"StartContainer\" for \"horizon\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-horizon:current-podified\\\"\"]" pod="openstack/horizon-799d9cbb9f-xj4zl" podUID="28380b30-cf01-44f7-8ec6-b1812a5e8435" Sep 30 12:37:04 crc kubenswrapper[5002]: I0930 12:37:04.792085 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/14d6a446-4f72-417e-8bdd-af837eda2e70-scripts" (OuterVolumeSpecName: "scripts") pod "14d6a446-4f72-417e-8bdd-af837eda2e70" (UID: "14d6a446-4f72-417e-8bdd-af837eda2e70"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:37:04 crc kubenswrapper[5002]: I0930 12:37:04.820981 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cc38060e-b831-4300-9f91-e62f1075da79-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "cc38060e-b831-4300-9f91-e62f1075da79" (UID: "cc38060e-b831-4300-9f91-e62f1075da79"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:37:04 crc kubenswrapper[5002]: I0930 12:37:04.861275 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cc38060e-b831-4300-9f91-e62f1075da79-config-data" (OuterVolumeSpecName: "config-data") pod "cc38060e-b831-4300-9f91-e62f1075da79" (UID: "cc38060e-b831-4300-9f91-e62f1075da79"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:37:04 crc kubenswrapper[5002]: I0930 12:37:04.868099 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/14d6a446-4f72-417e-8bdd-af837eda2e70-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "14d6a446-4f72-417e-8bdd-af837eda2e70" (UID: "14d6a446-4f72-417e-8bdd-af837eda2e70"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:37:04 crc kubenswrapper[5002]: I0930 12:37:04.868708 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/7d2bf63f-4cce-402b-b774-e77dc172959c-horizon-secret-key\") pod \"7d2bf63f-4cce-402b-b774-e77dc172959c\" (UID: \"7d2bf63f-4cce-402b-b774-e77dc172959c\") " Sep 30 12:37:04 crc kubenswrapper[5002]: I0930 12:37:04.868828 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7d2bf63f-4cce-402b-b774-e77dc172959c-logs\") pod \"7d2bf63f-4cce-402b-b774-e77dc172959c\" (UID: \"7d2bf63f-4cce-402b-b774-e77dc172959c\") " Sep 30 12:37:04 crc kubenswrapper[5002]: I0930 12:37:04.868868 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/7d2bf63f-4cce-402b-b774-e77dc172959c-config-data\") pod \"7d2bf63f-4cce-402b-b774-e77dc172959c\" (UID: \"7d2bf63f-4cce-402b-b774-e77dc172959c\") " Sep 30 12:37:04 crc kubenswrapper[5002]: I0930 12:37:04.868969 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-blm2s\" (UniqueName: \"kubernetes.io/projected/7d2bf63f-4cce-402b-b774-e77dc172959c-kube-api-access-blm2s\") pod \"7d2bf63f-4cce-402b-b774-e77dc172959c\" (UID: \"7d2bf63f-4cce-402b-b774-e77dc172959c\") " Sep 30 12:37:04 crc kubenswrapper[5002]: I0930 12:37:04.869030 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/7d2bf63f-4cce-402b-b774-e77dc172959c-scripts\") pod \"7d2bf63f-4cce-402b-b774-e77dc172959c\" (UID: \"7d2bf63f-4cce-402b-b774-e77dc172959c\") " Sep 30 12:37:04 crc kubenswrapper[5002]: I0930 12:37:04.869460 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7d2bf63f-4cce-402b-b774-e77dc172959c-logs" (OuterVolumeSpecName: "logs") pod "7d2bf63f-4cce-402b-b774-e77dc172959c" (UID: "7d2bf63f-4cce-402b-b774-e77dc172959c"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 12:37:04 crc kubenswrapper[5002]: I0930 12:37:04.869536 5002 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/14d6a446-4f72-417e-8bdd-af837eda2e70-logs\") on node \"crc\" DevicePath \"\"" Sep 30 12:37:04 crc kubenswrapper[5002]: I0930 12:37:04.869559 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w98jx\" (UniqueName: \"kubernetes.io/projected/cc38060e-b831-4300-9f91-e62f1075da79-kube-api-access-w98jx\") on node \"crc\" DevicePath \"\"" Sep 30 12:37:04 crc kubenswrapper[5002]: I0930 12:37:04.869573 5002 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/14d6a446-4f72-417e-8bdd-af837eda2e70-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 12:37:04 crc kubenswrapper[5002]: I0930 12:37:04.869583 5002 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/cc38060e-b831-4300-9f91-e62f1075da79-httpd-run\") on node \"crc\" DevicePath \"\"" Sep 30 12:37:04 crc kubenswrapper[5002]: I0930 12:37:04.869609 5002 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") on node \"crc\" " Sep 30 12:37:04 crc kubenswrapper[5002]: I0930 12:37:04.869622 5002 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cc38060e-b831-4300-9f91-e62f1075da79-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 12:37:04 crc kubenswrapper[5002]: I0930 12:37:04.869635 5002 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/14d6a446-4f72-417e-8bdd-af837eda2e70-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 12:37:04 crc kubenswrapper[5002]: I0930 12:37:04.869648 5002 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cc38060e-b831-4300-9f91-e62f1075da79-logs\") on node \"crc\" DevicePath \"\"" Sep 30 12:37:04 crc kubenswrapper[5002]: I0930 12:37:04.869660 5002 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cc38060e-b831-4300-9f91-e62f1075da79-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 12:37:04 crc kubenswrapper[5002]: I0930 12:37:04.869678 5002 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") on node \"crc\" " Sep 30 12:37:04 crc kubenswrapper[5002]: I0930 12:37:04.869690 5002 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cc38060e-b831-4300-9f91-e62f1075da79-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 12:37:04 crc kubenswrapper[5002]: I0930 12:37:04.869702 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bc7kt\" (UniqueName: \"kubernetes.io/projected/14d6a446-4f72-417e-8bdd-af837eda2e70-kube-api-access-bc7kt\") on node \"crc\" DevicePath \"\"" Sep 30 12:37:04 crc kubenswrapper[5002]: I0930 12:37:04.870630 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7d2bf63f-4cce-402b-b774-e77dc172959c-config-data" (OuterVolumeSpecName: "config-data") pod "7d2bf63f-4cce-402b-b774-e77dc172959c" (UID: "7d2bf63f-4cce-402b-b774-e77dc172959c"). 
InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 12:37:04 crc kubenswrapper[5002]: I0930 12:37:04.871148 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7d2bf63f-4cce-402b-b774-e77dc172959c-scripts" (OuterVolumeSpecName: "scripts") pod "7d2bf63f-4cce-402b-b774-e77dc172959c" (UID: "7d2bf63f-4cce-402b-b774-e77dc172959c"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 12:37:04 crc kubenswrapper[5002]: I0930 12:37:04.886939 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7d2bf63f-4cce-402b-b774-e77dc172959c-kube-api-access-blm2s" (OuterVolumeSpecName: "kube-api-access-blm2s") pod "7d2bf63f-4cce-402b-b774-e77dc172959c" (UID: "7d2bf63f-4cce-402b-b774-e77dc172959c"). InnerVolumeSpecName "kube-api-access-blm2s". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 12:37:04 crc kubenswrapper[5002]: I0930 12:37:04.892497 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7d2bf63f-4cce-402b-b774-e77dc172959c-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "7d2bf63f-4cce-402b-b774-e77dc172959c" (UID: "7d2bf63f-4cce-402b-b774-e77dc172959c"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:37:04 crc kubenswrapper[5002]: I0930 12:37:04.898545 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/14d6a446-4f72-417e-8bdd-af837eda2e70-config-data" (OuterVolumeSpecName: "config-data") pod "14d6a446-4f72-417e-8bdd-af837eda2e70" (UID: "14d6a446-4f72-417e-8bdd-af837eda2e70"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:37:04 crc kubenswrapper[5002]: I0930 12:37:04.903420 5002 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage07-crc" (UniqueName: "kubernetes.io/local-volume/local-storage07-crc") on node "crc" Sep 30 12:37:04 crc kubenswrapper[5002]: I0930 12:37:04.905609 5002 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage06-crc" (UniqueName: "kubernetes.io/local-volume/local-storage06-crc") on node "crc" Sep 30 12:37:04 crc kubenswrapper[5002]: I0930 12:37:04.968346 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-77585f5f8c-6kcs9" Sep 30 12:37:04 crc kubenswrapper[5002]: I0930 12:37:04.972825 5002 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/7d2bf63f-4cce-402b-b774-e77dc172959c-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 12:37:04 crc kubenswrapper[5002]: I0930 12:37:04.972850 5002 reconciler_common.go:293] "Volume detached for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") on node \"crc\" DevicePath \"\"" Sep 30 12:37:04 crc kubenswrapper[5002]: I0930 12:37:04.972863 5002 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/7d2bf63f-4cce-402b-b774-e77dc172959c-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Sep 30 12:37:04 crc kubenswrapper[5002]: I0930 12:37:04.972876 5002 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7d2bf63f-4cce-402b-b774-e77dc172959c-logs\") on node \"crc\" DevicePath \"\"" Sep 30 12:37:04 crc kubenswrapper[5002]: I0930 12:37:04.972884 5002 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/7d2bf63f-4cce-402b-b774-e77dc172959c-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 12:37:04 crc kubenswrapper[5002]: I0930 12:37:04.972891 5002 reconciler_common.go:293] "Volume detached for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") on node \"crc\" DevicePath \"\"" Sep 30 12:37:04 crc kubenswrapper[5002]: I0930 12:37:04.972899 5002 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/14d6a446-4f72-417e-8bdd-af837eda2e70-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 12:37:04 crc kubenswrapper[5002]: I0930 12:37:04.972907 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-blm2s\" (UniqueName: \"kubernetes.io/projected/7d2bf63f-4cce-402b-b774-e77dc172959c-kube-api-access-blm2s\") on node \"crc\" DevicePath \"\"" Sep 30 12:37:05 crc kubenswrapper[5002]: I0930 12:37:05.075759 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-b4hps\" (UniqueName: \"kubernetes.io/projected/cd808dd7-6b41-4208-8f6e-c54bea4d22c5-kube-api-access-b4hps\") pod \"cd808dd7-6b41-4208-8f6e-c54bea4d22c5\" (UID: \"cd808dd7-6b41-4208-8f6e-c54bea4d22c5\") " Sep 30 12:37:05 crc kubenswrapper[5002]: I0930 12:37:05.075834 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/cd808dd7-6b41-4208-8f6e-c54bea4d22c5-dns-swift-storage-0\") pod \"cd808dd7-6b41-4208-8f6e-c54bea4d22c5\" (UID: \"cd808dd7-6b41-4208-8f6e-c54bea4d22c5\") " Sep 30 12:37:05 crc kubenswrapper[5002]: I0930 12:37:05.075957 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/cd808dd7-6b41-4208-8f6e-c54bea4d22c5-ovsdbserver-nb\") pod \"cd808dd7-6b41-4208-8f6e-c54bea4d22c5\" (UID: \"cd808dd7-6b41-4208-8f6e-c54bea4d22c5\") " Sep 30 12:37:05 crc kubenswrapper[5002]: I0930 12:37:05.075993 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cd808dd7-6b41-4208-8f6e-c54bea4d22c5-config\") pod \"cd808dd7-6b41-4208-8f6e-c54bea4d22c5\" (UID: 
\"cd808dd7-6b41-4208-8f6e-c54bea4d22c5\") " Sep 30 12:37:05 crc kubenswrapper[5002]: I0930 12:37:05.076088 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/cd808dd7-6b41-4208-8f6e-c54bea4d22c5-dns-svc\") pod \"cd808dd7-6b41-4208-8f6e-c54bea4d22c5\" (UID: \"cd808dd7-6b41-4208-8f6e-c54bea4d22c5\") " Sep 30 12:37:05 crc kubenswrapper[5002]: I0930 12:37:05.076197 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/cd808dd7-6b41-4208-8f6e-c54bea4d22c5-ovsdbserver-sb\") pod \"cd808dd7-6b41-4208-8f6e-c54bea4d22c5\" (UID: \"cd808dd7-6b41-4208-8f6e-c54bea4d22c5\") " Sep 30 12:37:05 crc kubenswrapper[5002]: I0930 12:37:05.084414 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cd808dd7-6b41-4208-8f6e-c54bea4d22c5-kube-api-access-b4hps" (OuterVolumeSpecName: "kube-api-access-b4hps") pod "cd808dd7-6b41-4208-8f6e-c54bea4d22c5" (UID: "cd808dd7-6b41-4208-8f6e-c54bea4d22c5"). InnerVolumeSpecName "kube-api-access-b4hps". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 12:37:05 crc kubenswrapper[5002]: I0930 12:37:05.107807 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"cc38060e-b831-4300-9f91-e62f1075da79","Type":"ContainerDied","Data":"7b1395b086ab7451066f6ed28d50cfa07a8ec6d56a798cddbb29c97f664fcbb3"} Sep 30 12:37:05 crc kubenswrapper[5002]: I0930 12:37:05.107870 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Sep 30 12:37:05 crc kubenswrapper[5002]: I0930 12:37:05.107883 5002 scope.go:117] "RemoveContainer" containerID="9d185eb5745a75883dce234f0ddd099b8145cb59e56eb83ae3b7ff72f482cf28" Sep 30 12:37:05 crc kubenswrapper[5002]: I0930 12:37:05.109848 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"14d6a446-4f72-417e-8bdd-af837eda2e70","Type":"ContainerDied","Data":"7495e664c47a536f13a523bed9ecb6a1e2ab89fe397ef0d78fca0f58078ff413"} Sep 30 12:37:05 crc kubenswrapper[5002]: I0930 12:37:05.109868 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Sep 30 12:37:05 crc kubenswrapper[5002]: I0930 12:37:05.117682 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-77585f5f8c-6kcs9" event={"ID":"cd808dd7-6b41-4208-8f6e-c54bea4d22c5","Type":"ContainerDied","Data":"ca0313ab28b30572e70f6693636759d6299b7e4e7c54c628545681b4afe9b858"} Sep 30 12:37:05 crc kubenswrapper[5002]: I0930 12:37:05.117774 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-77585f5f8c-6kcs9" Sep 30 12:37:05 crc kubenswrapper[5002]: I0930 12:37:05.122599 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7cf8dd6b55-z9psc" event={"ID":"7d2bf63f-4cce-402b-b774-e77dc172959c","Type":"ContainerDied","Data":"eb75eb3ee65a0d84d162a72d4b054c2f5cec0985fc6a782f23b4cce7bfc20470"} Sep 30 12:37:05 crc kubenswrapper[5002]: I0930 12:37:05.122637 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-7cf8dd6b55-z9psc" Sep 30 12:37:05 crc kubenswrapper[5002]: I0930 12:37:05.127449 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cd808dd7-6b41-4208-8f6e-c54bea4d22c5-config" (OuterVolumeSpecName: "config") pod "cd808dd7-6b41-4208-8f6e-c54bea4d22c5" (UID: "cd808dd7-6b41-4208-8f6e-c54bea4d22c5"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 12:37:05 crc kubenswrapper[5002]: I0930 12:37:05.141251 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cd808dd7-6b41-4208-8f6e-c54bea4d22c5-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "cd808dd7-6b41-4208-8f6e-c54bea4d22c5" (UID: "cd808dd7-6b41-4208-8f6e-c54bea4d22c5"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 12:37:05 crc kubenswrapper[5002]: I0930 12:37:05.151374 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cd808dd7-6b41-4208-8f6e-c54bea4d22c5-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "cd808dd7-6b41-4208-8f6e-c54bea4d22c5" (UID: "cd808dd7-6b41-4208-8f6e-c54bea4d22c5"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 12:37:05 crc kubenswrapper[5002]: I0930 12:37:05.152075 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cd808dd7-6b41-4208-8f6e-c54bea4d22c5-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "cd808dd7-6b41-4208-8f6e-c54bea4d22c5" (UID: "cd808dd7-6b41-4208-8f6e-c54bea4d22c5"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 12:37:05 crc kubenswrapper[5002]: I0930 12:37:05.154849 5002 scope.go:117] "RemoveContainer" containerID="0b4707074aebf2d025ef4b985dd07c9692750ee98e7f6a36594735de1f63e364" Sep 30 12:37:05 crc kubenswrapper[5002]: E0930 12:37:05.154895 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"placement-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-placement-api:current-podified\\\"\"" pod="openstack/placement-db-sync-bnf58" podUID="4fdf0c77-68ae-41ff-b6b5-122baa461b8c" Sep 30 12:37:05 crc kubenswrapper[5002]: I0930 12:37:05.156260 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cd808dd7-6b41-4208-8f6e-c54bea4d22c5-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "cd808dd7-6b41-4208-8f6e-c54bea4d22c5" (UID: "cd808dd7-6b41-4208-8f6e-c54bea4d22c5"). InnerVolumeSpecName "dns-swift-storage-0". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 12:37:05 crc kubenswrapper[5002]: I0930 12:37:05.202182 5002 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/cd808dd7-6b41-4208-8f6e-c54bea4d22c5-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Sep 30 12:37:05 crc kubenswrapper[5002]: I0930 12:37:05.202226 5002 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cd808dd7-6b41-4208-8f6e-c54bea4d22c5-config\") on node \"crc\" DevicePath \"\"" Sep 30 12:37:05 crc kubenswrapper[5002]: I0930 12:37:05.202242 5002 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/cd808dd7-6b41-4208-8f6e-c54bea4d22c5-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 30 12:37:05 crc kubenswrapper[5002]: I0930 12:37:05.202263 5002 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/cd808dd7-6b41-4208-8f6e-c54bea4d22c5-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Sep 30 12:37:05 crc kubenswrapper[5002]: I0930 12:37:05.202276 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-b4hps\" (UniqueName: \"kubernetes.io/projected/cd808dd7-6b41-4208-8f6e-c54bea4d22c5-kube-api-access-b4hps\") on node \"crc\" DevicePath \"\"" Sep 30 12:37:05 crc kubenswrapper[5002]: I0930 12:37:05.202291 5002 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/cd808dd7-6b41-4208-8f6e-c54bea4d22c5-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Sep 30 12:37:05 crc kubenswrapper[5002]: I0930 12:37:05.223587 5002 scope.go:117] "RemoveContainer" containerID="1a11bac8fa92b6ef5bf7f5731730deb7d987f7936eeb25001fcc56ca768e2725" Sep 30 12:37:05 crc kubenswrapper[5002]: I0930 12:37:05.241546 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 30 12:37:05 crc kubenswrapper[5002]: I0930 12:37:05.256119 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 30 12:37:05 crc kubenswrapper[5002]: I0930 12:37:05.267482 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 30 12:37:05 crc kubenswrapper[5002]: E0930 12:37:05.267818 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cd808dd7-6b41-4208-8f6e-c54bea4d22c5" containerName="dnsmasq-dns" Sep 30 12:37:05 crc kubenswrapper[5002]: I0930 12:37:05.267835 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="cd808dd7-6b41-4208-8f6e-c54bea4d22c5" containerName="dnsmasq-dns" Sep 30 12:37:05 crc kubenswrapper[5002]: E0930 12:37:05.267857 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="14d6a446-4f72-417e-8bdd-af837eda2e70" containerName="glance-log" Sep 30 12:37:05 crc kubenswrapper[5002]: I0930 12:37:05.267864 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="14d6a446-4f72-417e-8bdd-af837eda2e70" containerName="glance-log" Sep 30 12:37:05 crc kubenswrapper[5002]: E0930 12:37:05.267882 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="14d6a446-4f72-417e-8bdd-af837eda2e70" containerName="glance-httpd" Sep 30 12:37:05 crc kubenswrapper[5002]: I0930 12:37:05.267887 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="14d6a446-4f72-417e-8bdd-af837eda2e70" containerName="glance-httpd" Sep 30 12:37:05 crc kubenswrapper[5002]: E0930 
12:37:05.267898 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cc38060e-b831-4300-9f91-e62f1075da79" containerName="glance-httpd" Sep 30 12:37:05 crc kubenswrapper[5002]: I0930 12:37:05.267904 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="cc38060e-b831-4300-9f91-e62f1075da79" containerName="glance-httpd" Sep 30 12:37:05 crc kubenswrapper[5002]: E0930 12:37:05.267915 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cd808dd7-6b41-4208-8f6e-c54bea4d22c5" containerName="init" Sep 30 12:37:05 crc kubenswrapper[5002]: I0930 12:37:05.267921 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="cd808dd7-6b41-4208-8f6e-c54bea4d22c5" containerName="init" Sep 30 12:37:05 crc kubenswrapper[5002]: E0930 12:37:05.267934 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cc38060e-b831-4300-9f91-e62f1075da79" containerName="glance-log" Sep 30 12:37:05 crc kubenswrapper[5002]: I0930 12:37:05.267940 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="cc38060e-b831-4300-9f91-e62f1075da79" containerName="glance-log" Sep 30 12:37:05 crc kubenswrapper[5002]: I0930 12:37:05.268085 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="14d6a446-4f72-417e-8bdd-af837eda2e70" containerName="glance-log" Sep 30 12:37:05 crc kubenswrapper[5002]: I0930 12:37:05.268103 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="cd808dd7-6b41-4208-8f6e-c54bea4d22c5" containerName="dnsmasq-dns" Sep 30 12:37:05 crc kubenswrapper[5002]: I0930 12:37:05.268111 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="cc38060e-b831-4300-9f91-e62f1075da79" containerName="glance-log" Sep 30 12:37:05 crc kubenswrapper[5002]: I0930 12:37:05.268120 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="14d6a446-4f72-417e-8bdd-af837eda2e70" containerName="glance-httpd" Sep 30 12:37:05 crc kubenswrapper[5002]: I0930 12:37:05.268132 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="cc38060e-b831-4300-9f91-e62f1075da79" containerName="glance-httpd" Sep 30 12:37:05 crc kubenswrapper[5002]: I0930 12:37:05.268976 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Sep 30 12:37:05 crc kubenswrapper[5002]: I0930 12:37:05.277080 5002 scope.go:117] "RemoveContainer" containerID="9cfaf90944d3681da676e69ecb8c3719aecc5ffc5074ac818734df38142da6e2" Sep 30 12:37:05 crc kubenswrapper[5002]: I0930 12:37:05.277422 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-4pcks" Sep 30 12:37:05 crc kubenswrapper[5002]: I0930 12:37:05.277609 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Sep 30 12:37:05 crc kubenswrapper[5002]: I0930 12:37:05.278252 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-scripts" Sep 30 12:37:05 crc kubenswrapper[5002]: I0930 12:37:05.280081 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc" Sep 30 12:37:05 crc kubenswrapper[5002]: I0930 12:37:05.296508 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Sep 30 12:37:05 crc kubenswrapper[5002]: I0930 12:37:05.306719 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Sep 30 12:37:05 crc kubenswrapper[5002]: W0930 12:37:05.316320 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod397fcd72_2dc0_4de0_9a5d_69fc3b14640a.slice/crio-ed122d25344570bfe55d23f9dc0ffa9c5562002727ba70a1b1537fb488f6fcb6 WatchSource:0}: Error finding container ed122d25344570bfe55d23f9dc0ffa9c5562002727ba70a1b1537fb488f6fcb6: Status 404 returned error can't find the container with id ed122d25344570bfe55d23f9dc0ffa9c5562002727ba70a1b1537fb488f6fcb6 Sep 30 12:37:05 crc kubenswrapper[5002]: I0930 12:37:05.316370 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 30 12:37:05 crc kubenswrapper[5002]: I0930 12:37:05.324495 5002 scope.go:117] "RemoveContainer" containerID="7258208ab5a087fe79abec802c051a0fafc3d3516365918b10268067678c5af2" Sep 30 12:37:05 crc kubenswrapper[5002]: I0930 12:37:05.337258 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Sep 30 12:37:05 crc kubenswrapper[5002]: I0930 12:37:05.339368 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Sep 30 12:37:05 crc kubenswrapper[5002]: I0930 12:37:05.341795 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Sep 30 12:37:05 crc kubenswrapper[5002]: I0930 12:37:05.343049 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc" Sep 30 12:37:05 crc kubenswrapper[5002]: I0930 12:37:05.352227 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-7cf8dd6b55-z9psc"] Sep 30 12:37:05 crc kubenswrapper[5002]: I0930 12:37:05.363848 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-7cf8dd6b55-z9psc"] Sep 30 12:37:05 crc kubenswrapper[5002]: I0930 12:37:05.366181 5002 scope.go:117] "RemoveContainer" containerID="0f63e3fb4af0a8e9d7d550e840712ee1e1f5cb91aff5bf9e33d0f769c364c436" Sep 30 12:37:05 crc kubenswrapper[5002]: I0930 12:37:05.379800 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Sep 30 12:37:05 crc kubenswrapper[5002]: I0930 12:37:05.405822 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-c8b1-account-create-tkp6v"] Sep 30 12:37:05 crc kubenswrapper[5002]: I0930 12:37:05.406384 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5cb9bff6-f879-4cb0-87ca-9a1879f8c516-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"5cb9bff6-f879-4cb0-87ca-9a1879f8c516\") " pod="openstack/glance-default-external-api-0" Sep 30 12:37:05 crc kubenswrapper[5002]: I0930 12:37:05.406434 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tq4xz\" (UniqueName: \"kubernetes.io/projected/5cb9bff6-f879-4cb0-87ca-9a1879f8c516-kube-api-access-tq4xz\") pod \"glance-default-external-api-0\" (UID: \"5cb9bff6-f879-4cb0-87ca-9a1879f8c516\") " pod="openstack/glance-default-external-api-0" Sep 30 12:37:05 crc kubenswrapper[5002]: I0930 12:37:05.406481 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/5cb9bff6-f879-4cb0-87ca-9a1879f8c516-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"5cb9bff6-f879-4cb0-87ca-9a1879f8c516\") " pod="openstack/glance-default-external-api-0" Sep 30 12:37:05 crc kubenswrapper[5002]: I0930 12:37:05.406517 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5cb9bff6-f879-4cb0-87ca-9a1879f8c516-scripts\") pod \"glance-default-external-api-0\" (UID: \"5cb9bff6-f879-4cb0-87ca-9a1879f8c516\") " pod="openstack/glance-default-external-api-0" Sep 30 12:37:05 crc kubenswrapper[5002]: I0930 12:37:05.406571 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-external-api-0\" (UID: \"5cb9bff6-f879-4cb0-87ca-9a1879f8c516\") " pod="openstack/glance-default-external-api-0" Sep 30 12:37:05 crc kubenswrapper[5002]: I0930 12:37:05.406596 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k7c25\" (UniqueName: 
\"kubernetes.io/projected/a39b35d6-60c5-4ceb-b46e-9a00daf421ce-kube-api-access-k7c25\") pod \"glance-default-internal-api-0\" (UID: \"a39b35d6-60c5-4ceb-b46e-9a00daf421ce\") " pod="openstack/glance-default-internal-api-0" Sep 30 12:37:05 crc kubenswrapper[5002]: I0930 12:37:05.406624 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5cb9bff6-f879-4cb0-87ca-9a1879f8c516-logs\") pod \"glance-default-external-api-0\" (UID: \"5cb9bff6-f879-4cb0-87ca-9a1879f8c516\") " pod="openstack/glance-default-external-api-0" Sep 30 12:37:05 crc kubenswrapper[5002]: I0930 12:37:05.406659 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a39b35d6-60c5-4ceb-b46e-9a00daf421ce-config-data\") pod \"glance-default-internal-api-0\" (UID: \"a39b35d6-60c5-4ceb-b46e-9a00daf421ce\") " pod="openstack/glance-default-internal-api-0" Sep 30 12:37:05 crc kubenswrapper[5002]: I0930 12:37:05.406699 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/a39b35d6-60c5-4ceb-b46e-9a00daf421ce-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"a39b35d6-60c5-4ceb-b46e-9a00daf421ce\") " pod="openstack/glance-default-internal-api-0" Sep 30 12:37:05 crc kubenswrapper[5002]: I0930 12:37:05.406720 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/a39b35d6-60c5-4ceb-b46e-9a00daf421ce-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"a39b35d6-60c5-4ceb-b46e-9a00daf421ce\") " pod="openstack/glance-default-internal-api-0" Sep 30 12:37:05 crc kubenswrapper[5002]: I0930 12:37:05.406738 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a39b35d6-60c5-4ceb-b46e-9a00daf421ce-scripts\") pod \"glance-default-internal-api-0\" (UID: \"a39b35d6-60c5-4ceb-b46e-9a00daf421ce\") " pod="openstack/glance-default-internal-api-0" Sep 30 12:37:05 crc kubenswrapper[5002]: I0930 12:37:05.408639 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/5cb9bff6-f879-4cb0-87ca-9a1879f8c516-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"5cb9bff6-f879-4cb0-87ca-9a1879f8c516\") " pod="openstack/glance-default-external-api-0" Sep 30 12:37:05 crc kubenswrapper[5002]: I0930 12:37:05.408749 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5cb9bff6-f879-4cb0-87ca-9a1879f8c516-config-data\") pod \"glance-default-external-api-0\" (UID: \"5cb9bff6-f879-4cb0-87ca-9a1879f8c516\") " pod="openstack/glance-default-external-api-0" Sep 30 12:37:05 crc kubenswrapper[5002]: I0930 12:37:05.408798 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a39b35d6-60c5-4ceb-b46e-9a00daf421ce-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"a39b35d6-60c5-4ceb-b46e-9a00daf421ce\") " pod="openstack/glance-default-internal-api-0" Sep 30 12:37:05 crc kubenswrapper[5002]: I0930 12:37:05.408938 5002 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"glance-default-internal-api-0\" (UID: \"a39b35d6-60c5-4ceb-b46e-9a00daf421ce\") " pod="openstack/glance-default-internal-api-0" Sep 30 12:37:05 crc kubenswrapper[5002]: I0930 12:37:05.408979 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a39b35d6-60c5-4ceb-b46e-9a00daf421ce-logs\") pod \"glance-default-internal-api-0\" (UID: \"a39b35d6-60c5-4ceb-b46e-9a00daf421ce\") " pod="openstack/glance-default-internal-api-0" Sep 30 12:37:05 crc kubenswrapper[5002]: I0930 12:37:05.426058 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-0907-account-create-r5dxh"] Sep 30 12:37:05 crc kubenswrapper[5002]: I0930 12:37:05.470368 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-77585f5f8c-6kcs9"] Sep 30 12:37:05 crc kubenswrapper[5002]: I0930 12:37:05.483235 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-77585f5f8c-6kcs9"] Sep 30 12:37:05 crc kubenswrapper[5002]: I0930 12:37:05.511664 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"glance-default-internal-api-0\" (UID: \"a39b35d6-60c5-4ceb-b46e-9a00daf421ce\") " pod="openstack/glance-default-internal-api-0" Sep 30 12:37:05 crc kubenswrapper[5002]: I0930 12:37:05.511922 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a39b35d6-60c5-4ceb-b46e-9a00daf421ce-logs\") pod \"glance-default-internal-api-0\" (UID: \"a39b35d6-60c5-4ceb-b46e-9a00daf421ce\") " pod="openstack/glance-default-internal-api-0" Sep 30 12:37:05 crc kubenswrapper[5002]: I0930 12:37:05.511949 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5cb9bff6-f879-4cb0-87ca-9a1879f8c516-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"5cb9bff6-f879-4cb0-87ca-9a1879f8c516\") " pod="openstack/glance-default-external-api-0" Sep 30 12:37:05 crc kubenswrapper[5002]: I0930 12:37:05.511970 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tq4xz\" (UniqueName: \"kubernetes.io/projected/5cb9bff6-f879-4cb0-87ca-9a1879f8c516-kube-api-access-tq4xz\") pod \"glance-default-external-api-0\" (UID: \"5cb9bff6-f879-4cb0-87ca-9a1879f8c516\") " pod="openstack/glance-default-external-api-0" Sep 30 12:37:05 crc kubenswrapper[5002]: I0930 12:37:05.511989 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/5cb9bff6-f879-4cb0-87ca-9a1879f8c516-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"5cb9bff6-f879-4cb0-87ca-9a1879f8c516\") " pod="openstack/glance-default-external-api-0" Sep 30 12:37:05 crc kubenswrapper[5002]: I0930 12:37:05.512011 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5cb9bff6-f879-4cb0-87ca-9a1879f8c516-scripts\") pod \"glance-default-external-api-0\" (UID: \"5cb9bff6-f879-4cb0-87ca-9a1879f8c516\") " pod="openstack/glance-default-external-api-0" Sep 30 12:37:05 crc kubenswrapper[5002]: I0930 12:37:05.512030 5002 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-external-api-0\" (UID: \"5cb9bff6-f879-4cb0-87ca-9a1879f8c516\") " pod="openstack/glance-default-external-api-0" Sep 30 12:37:05 crc kubenswrapper[5002]: I0930 12:37:05.512046 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k7c25\" (UniqueName: \"kubernetes.io/projected/a39b35d6-60c5-4ceb-b46e-9a00daf421ce-kube-api-access-k7c25\") pod \"glance-default-internal-api-0\" (UID: \"a39b35d6-60c5-4ceb-b46e-9a00daf421ce\") " pod="openstack/glance-default-internal-api-0" Sep 30 12:37:05 crc kubenswrapper[5002]: I0930 12:37:05.512064 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5cb9bff6-f879-4cb0-87ca-9a1879f8c516-logs\") pod \"glance-default-external-api-0\" (UID: \"5cb9bff6-f879-4cb0-87ca-9a1879f8c516\") " pod="openstack/glance-default-external-api-0" Sep 30 12:37:05 crc kubenswrapper[5002]: I0930 12:37:05.512086 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a39b35d6-60c5-4ceb-b46e-9a00daf421ce-config-data\") pod \"glance-default-internal-api-0\" (UID: \"a39b35d6-60c5-4ceb-b46e-9a00daf421ce\") " pod="openstack/glance-default-internal-api-0" Sep 30 12:37:05 crc kubenswrapper[5002]: I0930 12:37:05.512112 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/a39b35d6-60c5-4ceb-b46e-9a00daf421ce-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"a39b35d6-60c5-4ceb-b46e-9a00daf421ce\") " pod="openstack/glance-default-internal-api-0" Sep 30 12:37:05 crc kubenswrapper[5002]: I0930 12:37:05.512128 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/a39b35d6-60c5-4ceb-b46e-9a00daf421ce-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"a39b35d6-60c5-4ceb-b46e-9a00daf421ce\") " pod="openstack/glance-default-internal-api-0" Sep 30 12:37:05 crc kubenswrapper[5002]: I0930 12:37:05.512146 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a39b35d6-60c5-4ceb-b46e-9a00daf421ce-scripts\") pod \"glance-default-internal-api-0\" (UID: \"a39b35d6-60c5-4ceb-b46e-9a00daf421ce\") " pod="openstack/glance-default-internal-api-0" Sep 30 12:37:05 crc kubenswrapper[5002]: I0930 12:37:05.512177 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/5cb9bff6-f879-4cb0-87ca-9a1879f8c516-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"5cb9bff6-f879-4cb0-87ca-9a1879f8c516\") " pod="openstack/glance-default-external-api-0" Sep 30 12:37:05 crc kubenswrapper[5002]: I0930 12:37:05.512203 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5cb9bff6-f879-4cb0-87ca-9a1879f8c516-config-data\") pod \"glance-default-external-api-0\" (UID: \"5cb9bff6-f879-4cb0-87ca-9a1879f8c516\") " pod="openstack/glance-default-external-api-0" Sep 30 12:37:05 crc kubenswrapper[5002]: I0930 12:37:05.512219 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" 
(UniqueName: \"kubernetes.io/secret/a39b35d6-60c5-4ceb-b46e-9a00daf421ce-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"a39b35d6-60c5-4ceb-b46e-9a00daf421ce\") " pod="openstack/glance-default-internal-api-0" Sep 30 12:37:05 crc kubenswrapper[5002]: I0930 12:37:05.513222 5002 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"glance-default-internal-api-0\" (UID: \"a39b35d6-60c5-4ceb-b46e-9a00daf421ce\") device mount path \"/mnt/openstack/pv06\"" pod="openstack/glance-default-internal-api-0" Sep 30 12:37:05 crc kubenswrapper[5002]: I0930 12:37:05.514289 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a39b35d6-60c5-4ceb-b46e-9a00daf421ce-logs\") pod \"glance-default-internal-api-0\" (UID: \"a39b35d6-60c5-4ceb-b46e-9a00daf421ce\") " pod="openstack/glance-default-internal-api-0" Sep 30 12:37:05 crc kubenswrapper[5002]: I0930 12:37:05.515179 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/a39b35d6-60c5-4ceb-b46e-9a00daf421ce-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"a39b35d6-60c5-4ceb-b46e-9a00daf421ce\") " pod="openstack/glance-default-internal-api-0" Sep 30 12:37:05 crc kubenswrapper[5002]: I0930 12:37:05.515442 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5cb9bff6-f879-4cb0-87ca-9a1879f8c516-logs\") pod \"glance-default-external-api-0\" (UID: \"5cb9bff6-f879-4cb0-87ca-9a1879f8c516\") " pod="openstack/glance-default-external-api-0" Sep 30 12:37:05 crc kubenswrapper[5002]: I0930 12:37:05.518797 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/5cb9bff6-f879-4cb0-87ca-9a1879f8c516-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"5cb9bff6-f879-4cb0-87ca-9a1879f8c516\") " pod="openstack/glance-default-external-api-0" Sep 30 12:37:05 crc kubenswrapper[5002]: I0930 12:37:05.519773 5002 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-external-api-0\" (UID: \"5cb9bff6-f879-4cb0-87ca-9a1879f8c516\") device mount path \"/mnt/openstack/pv07\"" pod="openstack/glance-default-external-api-0" Sep 30 12:37:05 crc kubenswrapper[5002]: I0930 12:37:05.520368 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a39b35d6-60c5-4ceb-b46e-9a00daf421ce-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"a39b35d6-60c5-4ceb-b46e-9a00daf421ce\") " pod="openstack/glance-default-internal-api-0" Sep 30 12:37:05 crc kubenswrapper[5002]: I0930 12:37:05.524351 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/a39b35d6-60c5-4ceb-b46e-9a00daf421ce-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"a39b35d6-60c5-4ceb-b46e-9a00daf421ce\") " pod="openstack/glance-default-internal-api-0" Sep 30 12:37:05 crc kubenswrapper[5002]: I0930 12:37:05.525384 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5cb9bff6-f879-4cb0-87ca-9a1879f8c516-combined-ca-bundle\") pod 
\"glance-default-external-api-0\" (UID: \"5cb9bff6-f879-4cb0-87ca-9a1879f8c516\") " pod="openstack/glance-default-external-api-0" Sep 30 12:37:05 crc kubenswrapper[5002]: I0930 12:37:05.526194 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/5cb9bff6-f879-4cb0-87ca-9a1879f8c516-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"5cb9bff6-f879-4cb0-87ca-9a1879f8c516\") " pod="openstack/glance-default-external-api-0" Sep 30 12:37:05 crc kubenswrapper[5002]: I0930 12:37:05.526992 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a39b35d6-60c5-4ceb-b46e-9a00daf421ce-config-data\") pod \"glance-default-internal-api-0\" (UID: \"a39b35d6-60c5-4ceb-b46e-9a00daf421ce\") " pod="openstack/glance-default-internal-api-0" Sep 30 12:37:05 crc kubenswrapper[5002]: I0930 12:37:05.527423 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a39b35d6-60c5-4ceb-b46e-9a00daf421ce-scripts\") pod \"glance-default-internal-api-0\" (UID: \"a39b35d6-60c5-4ceb-b46e-9a00daf421ce\") " pod="openstack/glance-default-internal-api-0" Sep 30 12:37:05 crc kubenswrapper[5002]: I0930 12:37:05.530587 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5cb9bff6-f879-4cb0-87ca-9a1879f8c516-config-data\") pod \"glance-default-external-api-0\" (UID: \"5cb9bff6-f879-4cb0-87ca-9a1879f8c516\") " pod="openstack/glance-default-external-api-0" Sep 30 12:37:05 crc kubenswrapper[5002]: I0930 12:37:05.530594 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tq4xz\" (UniqueName: \"kubernetes.io/projected/5cb9bff6-f879-4cb0-87ca-9a1879f8c516-kube-api-access-tq4xz\") pod \"glance-default-external-api-0\" (UID: \"5cb9bff6-f879-4cb0-87ca-9a1879f8c516\") " pod="openstack/glance-default-external-api-0" Sep 30 12:37:05 crc kubenswrapper[5002]: I0930 12:37:05.532664 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k7c25\" (UniqueName: \"kubernetes.io/projected/a39b35d6-60c5-4ceb-b46e-9a00daf421ce-kube-api-access-k7c25\") pod \"glance-default-internal-api-0\" (UID: \"a39b35d6-60c5-4ceb-b46e-9a00daf421ce\") " pod="openstack/glance-default-internal-api-0" Sep 30 12:37:05 crc kubenswrapper[5002]: I0930 12:37:05.535706 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5cb9bff6-f879-4cb0-87ca-9a1879f8c516-scripts\") pod \"glance-default-external-api-0\" (UID: \"5cb9bff6-f879-4cb0-87ca-9a1879f8c516\") " pod="openstack/glance-default-external-api-0" Sep 30 12:37:05 crc kubenswrapper[5002]: I0930 12:37:05.558938 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-external-api-0\" (UID: \"5cb9bff6-f879-4cb0-87ca-9a1879f8c516\") " pod="openstack/glance-default-external-api-0" Sep 30 12:37:05 crc kubenswrapper[5002]: I0930 12:37:05.569154 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"glance-default-internal-api-0\" (UID: \"a39b35d6-60c5-4ceb-b46e-9a00daf421ce\") " pod="openstack/glance-default-internal-api-0" Sep 30 12:37:05 crc kubenswrapper[5002]: I0930 
12:37:05.605356 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Sep 30 12:37:05 crc kubenswrapper[5002]: I0930 12:37:05.668724 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-56d859b67f-fr8wt"] Sep 30 12:37:05 crc kubenswrapper[5002]: I0930 12:37:05.671659 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Sep 30 12:37:05 crc kubenswrapper[5002]: I0930 12:37:05.697660 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-dcab-account-create-mxdfk"] Sep 30 12:37:05 crc kubenswrapper[5002]: I0930 12:37:05.715505 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-f8777b74-fpwh2"] Sep 30 12:37:05 crc kubenswrapper[5002]: I0930 12:37:05.725816 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-88rff"] Sep 30 12:37:05 crc kubenswrapper[5002]: W0930 12:37:05.727118 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod418c45be_fdf8_41be_899d_a75e1ab0acef.slice/crio-101beab084b6026dd5988ef7154ae14b81c2dfd297b6030cf1513e41ae7d265a WatchSource:0}: Error finding container 101beab084b6026dd5988ef7154ae14b81c2dfd297b6030cf1513e41ae7d265a: Status 404 returned error can't find the container with id 101beab084b6026dd5988ef7154ae14b81c2dfd297b6030cf1513e41ae7d265a Sep 30 12:37:05 crc kubenswrapper[5002]: W0930 12:37:05.741092 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7c12a4dd_a3df_4106_ab48_b628b89b3277.slice/crio-b7d96322f9594d1907cc93bebdaa20edcb55070f25bbb0b859395c2b8346b817 WatchSource:0}: Error finding container b7d96322f9594d1907cc93bebdaa20edcb55070f25bbb0b859395c2b8346b817: Status 404 returned error can't find the container with id b7d96322f9594d1907cc93bebdaa20edcb55070f25bbb0b859395c2b8346b817 Sep 30 12:37:05 crc kubenswrapper[5002]: I0930 12:37:05.746578 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-66c799f4f6-jprmr"] Sep 30 12:37:05 crc kubenswrapper[5002]: I0930 12:37:05.813370 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-799d9cbb9f-xj4zl" Sep 30 12:37:05 crc kubenswrapper[5002]: I0930 12:37:05.940232 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/28380b30-cf01-44f7-8ec6-b1812a5e8435-config-data\") pod \"28380b30-cf01-44f7-8ec6-b1812a5e8435\" (UID: \"28380b30-cf01-44f7-8ec6-b1812a5e8435\") " Sep 30 12:37:05 crc kubenswrapper[5002]: I0930 12:37:05.940282 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/28380b30-cf01-44f7-8ec6-b1812a5e8435-scripts\") pod \"28380b30-cf01-44f7-8ec6-b1812a5e8435\" (UID: \"28380b30-cf01-44f7-8ec6-b1812a5e8435\") " Sep 30 12:37:05 crc kubenswrapper[5002]: I0930 12:37:05.940372 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/28380b30-cf01-44f7-8ec6-b1812a5e8435-horizon-secret-key\") pod \"28380b30-cf01-44f7-8ec6-b1812a5e8435\" (UID: \"28380b30-cf01-44f7-8ec6-b1812a5e8435\") " Sep 30 12:37:05 crc kubenswrapper[5002]: I0930 12:37:05.940460 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ttjhs\" (UniqueName: \"kubernetes.io/projected/28380b30-cf01-44f7-8ec6-b1812a5e8435-kube-api-access-ttjhs\") pod \"28380b30-cf01-44f7-8ec6-b1812a5e8435\" (UID: \"28380b30-cf01-44f7-8ec6-b1812a5e8435\") " Sep 30 12:37:05 crc kubenswrapper[5002]: I0930 12:37:05.940508 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/28380b30-cf01-44f7-8ec6-b1812a5e8435-logs\") pod \"28380b30-cf01-44f7-8ec6-b1812a5e8435\" (UID: \"28380b30-cf01-44f7-8ec6-b1812a5e8435\") " Sep 30 12:37:05 crc kubenswrapper[5002]: I0930 12:37:05.941260 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/28380b30-cf01-44f7-8ec6-b1812a5e8435-logs" (OuterVolumeSpecName: "logs") pod "28380b30-cf01-44f7-8ec6-b1812a5e8435" (UID: "28380b30-cf01-44f7-8ec6-b1812a5e8435"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 12:37:05 crc kubenswrapper[5002]: I0930 12:37:05.942419 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/28380b30-cf01-44f7-8ec6-b1812a5e8435-scripts" (OuterVolumeSpecName: "scripts") pod "28380b30-cf01-44f7-8ec6-b1812a5e8435" (UID: "28380b30-cf01-44f7-8ec6-b1812a5e8435"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 12:37:05 crc kubenswrapper[5002]: I0930 12:37:05.942622 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/28380b30-cf01-44f7-8ec6-b1812a5e8435-config-data" (OuterVolumeSpecName: "config-data") pod "28380b30-cf01-44f7-8ec6-b1812a5e8435" (UID: "28380b30-cf01-44f7-8ec6-b1812a5e8435"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 12:37:05 crc kubenswrapper[5002]: I0930 12:37:05.945334 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/28380b30-cf01-44f7-8ec6-b1812a5e8435-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "28380b30-cf01-44f7-8ec6-b1812a5e8435" (UID: "28380b30-cf01-44f7-8ec6-b1812a5e8435"). InnerVolumeSpecName "horizon-secret-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:37:05 crc kubenswrapper[5002]: I0930 12:37:05.949015 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/28380b30-cf01-44f7-8ec6-b1812a5e8435-kube-api-access-ttjhs" (OuterVolumeSpecName: "kube-api-access-ttjhs") pod "28380b30-cf01-44f7-8ec6-b1812a5e8435" (UID: "28380b30-cf01-44f7-8ec6-b1812a5e8435"). InnerVolumeSpecName "kube-api-access-ttjhs". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 12:37:06 crc kubenswrapper[5002]: I0930 12:37:06.043538 5002 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/28380b30-cf01-44f7-8ec6-b1812a5e8435-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Sep 30 12:37:06 crc kubenswrapper[5002]: I0930 12:37:06.043581 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ttjhs\" (UniqueName: \"kubernetes.io/projected/28380b30-cf01-44f7-8ec6-b1812a5e8435-kube-api-access-ttjhs\") on node \"crc\" DevicePath \"\"" Sep 30 12:37:06 crc kubenswrapper[5002]: I0930 12:37:06.043597 5002 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/28380b30-cf01-44f7-8ec6-b1812a5e8435-logs\") on node \"crc\" DevicePath \"\"" Sep 30 12:37:06 crc kubenswrapper[5002]: I0930 12:37:06.043610 5002 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/28380b30-cf01-44f7-8ec6-b1812a5e8435-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 12:37:06 crc kubenswrapper[5002]: I0930 12:37:06.043621 5002 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/28380b30-cf01-44f7-8ec6-b1812a5e8435-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 12:37:06 crc kubenswrapper[5002]: I0930 12:37:06.134078 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-56d859b67f-fr8wt" event={"ID":"b44591be-b0af-4144-a646-7739c7ea1e69","Type":"ContainerStarted","Data":"32461133d62736c3a38370169d40920b6041ffc54b0fd4e9d1ce4b46792d3672"} Sep 30 12:37:06 crc kubenswrapper[5002]: I0930 12:37:06.135278 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-799d9cbb9f-xj4zl" event={"ID":"28380b30-cf01-44f7-8ec6-b1812a5e8435","Type":"ContainerDied","Data":"8b9cf2c972cd245b0494fe689bf4ca39a5cf1bcf8dfd925148adbd20e9610ed3"} Sep 30 12:37:06 crc kubenswrapper[5002]: I0930 12:37:06.135301 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-799d9cbb9f-xj4zl" Sep 30 12:37:06 crc kubenswrapper[5002]: I0930 12:37:06.137267 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-88rff" event={"ID":"418c45be-fdf8-41be-899d-a75e1ab0acef","Type":"ContainerStarted","Data":"101beab084b6026dd5988ef7154ae14b81c2dfd297b6030cf1513e41ae7d265a"} Sep 30 12:37:06 crc kubenswrapper[5002]: I0930 12:37:06.139566 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-dcab-account-create-mxdfk" event={"ID":"67948d65-c149-45be-914e-30dc00325da6","Type":"ContainerStarted","Data":"0982733f7a6e1efc9625ba7e6d99fb7c95af8ca67ab92697e4f91a0aa56a2235"} Sep 30 12:37:06 crc kubenswrapper[5002]: I0930 12:37:06.146374 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-c8b1-account-create-tkp6v" event={"ID":"397fcd72-2dc0-4de0-9a5d-69fc3b14640a","Type":"ContainerStarted","Data":"ed122d25344570bfe55d23f9dc0ffa9c5562002727ba70a1b1537fb488f6fcb6"} Sep 30 12:37:06 crc kubenswrapper[5002]: I0930 12:37:06.151401 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-f8777b74-fpwh2" event={"ID":"fd4d1d88-c894-496a-b2ee-00bf80fa2415","Type":"ContainerStarted","Data":"9f7eac6e87e340a6eae3d08f151d25141a38926b084074456b30b71482b6790a"} Sep 30 12:37:06 crc kubenswrapper[5002]: I0930 12:37:06.165649 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-0907-account-create-r5dxh" event={"ID":"153e0644-8b78-4c21-9377-a0d4c7cf0848","Type":"ContainerStarted","Data":"fe3ea557c96c9ae8904086f50659f5ab27f89db887faed7743cb323bca9b2620"} Sep 30 12:37:06 crc kubenswrapper[5002]: I0930 12:37:06.172091 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-66c799f4f6-jprmr" event={"ID":"7c12a4dd-a3df-4106-ab48-b628b89b3277","Type":"ContainerStarted","Data":"b7d96322f9594d1907cc93bebdaa20edcb55070f25bbb0b859395c2b8346b817"} Sep 30 12:37:06 crc kubenswrapper[5002]: I0930 12:37:06.198340 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-799d9cbb9f-xj4zl"] Sep 30 12:37:06 crc kubenswrapper[5002]: I0930 12:37:06.203325 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-799d9cbb9f-xj4zl"] Sep 30 12:37:06 crc kubenswrapper[5002]: I0930 12:37:06.281949 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 30 12:37:06 crc kubenswrapper[5002]: I0930 12:37:06.385609 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Sep 30 12:37:06 crc kubenswrapper[5002]: W0930 12:37:06.393390 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5cb9bff6_f879_4cb0_87ca_9a1879f8c516.slice/crio-9dda3d81cbd7a6e68714d7b41f2b0bea45c77f2ae2007cf420c108537c62e903 WatchSource:0}: Error finding container 9dda3d81cbd7a6e68714d7b41f2b0bea45c77f2ae2007cf420c108537c62e903: Status 404 returned error can't find the container with id 9dda3d81cbd7a6e68714d7b41f2b0bea45c77f2ae2007cf420c108537c62e903 Sep 30 12:37:06 crc kubenswrapper[5002]: I0930 12:37:06.699686 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="14d6a446-4f72-417e-8bdd-af837eda2e70" path="/var/lib/kubelet/pods/14d6a446-4f72-417e-8bdd-af837eda2e70/volumes" Sep 30 12:37:06 crc kubenswrapper[5002]: I0930 12:37:06.701067 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" 
podUID="28380b30-cf01-44f7-8ec6-b1812a5e8435" path="/var/lib/kubelet/pods/28380b30-cf01-44f7-8ec6-b1812a5e8435/volumes" Sep 30 12:37:06 crc kubenswrapper[5002]: I0930 12:37:06.701869 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7d2bf63f-4cce-402b-b774-e77dc172959c" path="/var/lib/kubelet/pods/7d2bf63f-4cce-402b-b774-e77dc172959c/volumes" Sep 30 12:37:06 crc kubenswrapper[5002]: I0930 12:37:06.705782 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cc38060e-b831-4300-9f91-e62f1075da79" path="/var/lib/kubelet/pods/cc38060e-b831-4300-9f91-e62f1075da79/volumes" Sep 30 12:37:06 crc kubenswrapper[5002]: I0930 12:37:06.708287 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cd808dd7-6b41-4208-8f6e-c54bea4d22c5" path="/var/lib/kubelet/pods/cd808dd7-6b41-4208-8f6e-c54bea4d22c5/volumes" Sep 30 12:37:07 crc kubenswrapper[5002]: I0930 12:37:07.187852 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-dcab-account-create-mxdfk" event={"ID":"67948d65-c149-45be-914e-30dc00325da6","Type":"ContainerStarted","Data":"fa9eea698781ec5b9c457e406dd33da4c5fdd3ca7f67169866bf33c25f461b3b"} Sep 30 12:37:07 crc kubenswrapper[5002]: I0930 12:37:07.189770 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-c8b1-account-create-tkp6v" event={"ID":"397fcd72-2dc0-4de0-9a5d-69fc3b14640a","Type":"ContainerStarted","Data":"0236891218dc97d1b4becd90b51dfe0651b3e5525f5ae116726ca521324018c9"} Sep 30 12:37:07 crc kubenswrapper[5002]: I0930 12:37:07.206745 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"5cb9bff6-f879-4cb0-87ca-9a1879f8c516","Type":"ContainerStarted","Data":"3df199381973c968502aa99e0c1d592eb10b7346112a85f99ec1f4bd5c28c8c7"} Sep 30 12:37:07 crc kubenswrapper[5002]: I0930 12:37:07.206785 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"5cb9bff6-f879-4cb0-87ca-9a1879f8c516","Type":"ContainerStarted","Data":"9dda3d81cbd7a6e68714d7b41f2b0bea45c77f2ae2007cf420c108537c62e903"} Sep 30 12:37:07 crc kubenswrapper[5002]: I0930 12:37:07.215143 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-0907-account-create-r5dxh" event={"ID":"153e0644-8b78-4c21-9377-a0d4c7cf0848","Type":"ContainerStarted","Data":"57ebfa51022e322f9dd7641936908a30044d5cc2bbf5d1c6b50d263f0dcab1f9"} Sep 30 12:37:07 crc kubenswrapper[5002]: I0930 12:37:07.218041 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-dcab-account-create-mxdfk" podStartSLOduration=20.218025764 podStartE2EDuration="20.218025764s" podCreationTimestamp="2025-09-30 12:36:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 12:37:07.211632101 +0000 UTC m=+1001.461314247" watchObservedRunningTime="2025-09-30 12:37:07.218025764 +0000 UTC m=+1001.467707910" Sep 30 12:37:07 crc kubenswrapper[5002]: I0930 12:37:07.220381 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-88rff" event={"ID":"418c45be-fdf8-41be-899d-a75e1ab0acef","Type":"ContainerStarted","Data":"c1a881029eb753f5bb8fdb3b059612db62c1dec044feab98e868aa941e59a615"} Sep 30 12:37:07 crc kubenswrapper[5002]: I0930 12:37:07.226792 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" 
event={"ID":"a39b35d6-60c5-4ceb-b46e-9a00daf421ce","Type":"ContainerStarted","Data":"cb042ef982b9bba57298308ede4cfed4f64370303f1c8aff2feb543e7f657ff2"} Sep 30 12:37:07 crc kubenswrapper[5002]: I0930 12:37:07.226840 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"a39b35d6-60c5-4ceb-b46e-9a00daf421ce","Type":"ContainerStarted","Data":"dfd7e88822b8a9fdfaca82d5fb76e1c34b62c5d069c8fd685db27aec25cbf9a5"} Sep 30 12:37:07 crc kubenswrapper[5002]: I0930 12:37:07.227457 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-c8b1-account-create-tkp6v" podStartSLOduration=21.227444595 podStartE2EDuration="21.227444595s" podCreationTimestamp="2025-09-30 12:36:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 12:37:07.224726005 +0000 UTC m=+1001.474408151" watchObservedRunningTime="2025-09-30 12:37:07.227444595 +0000 UTC m=+1001.477126741" Sep 30 12:37:07 crc kubenswrapper[5002]: I0930 12:37:07.246273 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-88rff" podStartSLOduration=5.246255253 podStartE2EDuration="5.246255253s" podCreationTimestamp="2025-09-30 12:37:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 12:37:07.241705978 +0000 UTC m=+1001.491388124" watchObservedRunningTime="2025-09-30 12:37:07.246255253 +0000 UTC m=+1001.495937399" Sep 30 12:37:07 crc kubenswrapper[5002]: I0930 12:37:07.254349 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-0907-account-create-r5dxh" podStartSLOduration=21.254328779 podStartE2EDuration="21.254328779s" podCreationTimestamp="2025-09-30 12:36:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 12:37:07.251546698 +0000 UTC m=+1001.501228854" watchObservedRunningTime="2025-09-30 12:37:07.254328779 +0000 UTC m=+1001.504010935" Sep 30 12:37:08 crc kubenswrapper[5002]: I0930 12:37:08.237985 5002 generic.go:334] "Generic (PLEG): container finished" podID="153e0644-8b78-4c21-9377-a0d4c7cf0848" containerID="57ebfa51022e322f9dd7641936908a30044d5cc2bbf5d1c6b50d263f0dcab1f9" exitCode=0 Sep 30 12:37:08 crc kubenswrapper[5002]: I0930 12:37:08.238072 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-0907-account-create-r5dxh" event={"ID":"153e0644-8b78-4c21-9377-a0d4c7cf0848","Type":"ContainerDied","Data":"57ebfa51022e322f9dd7641936908a30044d5cc2bbf5d1c6b50d263f0dcab1f9"} Sep 30 12:37:08 crc kubenswrapper[5002]: I0930 12:37:08.240348 5002 generic.go:334] "Generic (PLEG): container finished" podID="67948d65-c149-45be-914e-30dc00325da6" containerID="fa9eea698781ec5b9c457e406dd33da4c5fdd3ca7f67169866bf33c25f461b3b" exitCode=0 Sep 30 12:37:08 crc kubenswrapper[5002]: I0930 12:37:08.240369 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-dcab-account-create-mxdfk" event={"ID":"67948d65-c149-45be-914e-30dc00325da6","Type":"ContainerDied","Data":"fa9eea698781ec5b9c457e406dd33da4c5fdd3ca7f67169866bf33c25f461b3b"} Sep 30 12:37:08 crc kubenswrapper[5002]: I0930 12:37:08.241956 5002 generic.go:334] "Generic (PLEG): container finished" podID="397fcd72-2dc0-4de0-9a5d-69fc3b14640a" 
containerID="0236891218dc97d1b4becd90b51dfe0651b3e5525f5ae116726ca521324018c9" exitCode=0 Sep 30 12:37:08 crc kubenswrapper[5002]: I0930 12:37:08.242023 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-c8b1-account-create-tkp6v" event={"ID":"397fcd72-2dc0-4de0-9a5d-69fc3b14640a","Type":"ContainerDied","Data":"0236891218dc97d1b4becd90b51dfe0651b3e5525f5ae116726ca521324018c9"} Sep 30 12:37:09 crc kubenswrapper[5002]: I0930 12:37:09.264633 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"5cb9bff6-f879-4cb0-87ca-9a1879f8c516","Type":"ContainerStarted","Data":"c35f92f62bacb2046bb207b031c37b392d4486f818a61d44e1501770052babf0"} Sep 30 12:37:09 crc kubenswrapper[5002]: I0930 12:37:09.272410 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-66c799f4f6-jprmr" event={"ID":"7c12a4dd-a3df-4106-ab48-b628b89b3277","Type":"ContainerStarted","Data":"7510a75a7907971c46b1edad984891489441dd0547be22999f2869cc15b07d7b"} Sep 30 12:37:09 crc kubenswrapper[5002]: I0930 12:37:09.272453 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-66c799f4f6-jprmr" event={"ID":"7c12a4dd-a3df-4106-ab48-b628b89b3277","Type":"ContainerStarted","Data":"29865b78d8a014aa6e8a8ef4ed80b29c3000a2d2a2523dfa05a8ee7116f7385d"} Sep 30 12:37:09 crc kubenswrapper[5002]: I0930 12:37:09.274635 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"22178f02-1a64-4a88-a564-fe143875e7df","Type":"ContainerStarted","Data":"654d1adc3ddf20f5867bd8c29763dc1ed55ab3326b36f37943a8648cb889b6af"} Sep 30 12:37:09 crc kubenswrapper[5002]: I0930 12:37:09.278157 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-56d859b67f-fr8wt" event={"ID":"b44591be-b0af-4144-a646-7739c7ea1e69","Type":"ContainerStarted","Data":"9d400a84a73488d328314feecae4c3308da84f67dd6cdae0ad3616a9b96c4712"} Sep 30 12:37:09 crc kubenswrapper[5002]: I0930 12:37:09.278230 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-56d859b67f-fr8wt" event={"ID":"b44591be-b0af-4144-a646-7739c7ea1e69","Type":"ContainerStarted","Data":"d84b1612d7f900cbe04d8ca03dbaf098a8a8031bad4cd4bfc918107c0aba8633"} Sep 30 12:37:09 crc kubenswrapper[5002]: I0930 12:37:09.278269 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-56d859b67f-fr8wt" podUID="b44591be-b0af-4144-a646-7739c7ea1e69" containerName="horizon-log" containerID="cri-o://d84b1612d7f900cbe04d8ca03dbaf098a8a8031bad4cd4bfc918107c0aba8633" gracePeriod=30 Sep 30 12:37:09 crc kubenswrapper[5002]: I0930 12:37:09.278300 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-56d859b67f-fr8wt" podUID="b44591be-b0af-4144-a646-7739c7ea1e69" containerName="horizon" containerID="cri-o://9d400a84a73488d328314feecae4c3308da84f67dd6cdae0ad3616a9b96c4712" gracePeriod=30 Sep 30 12:37:09 crc kubenswrapper[5002]: I0930 12:37:09.289508 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=4.289493136 podStartE2EDuration="4.289493136s" podCreationTimestamp="2025-09-30 12:37:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 12:37:09.286446869 +0000 UTC m=+1003.536129025" watchObservedRunningTime="2025-09-30 12:37:09.289493136 +0000 UTC m=+1003.539175282" Sep 30 
12:37:09 crc kubenswrapper[5002]: I0930 12:37:09.296155 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"a39b35d6-60c5-4ceb-b46e-9a00daf421ce","Type":"ContainerStarted","Data":"4a6d4dfe07a149acaf5c29368d8b2ab40a63c24a6f678e21205f44c0e8edd66e"}
Sep 30 12:37:09 crc kubenswrapper[5002]: I0930 12:37:09.302330 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-f8777b74-fpwh2" event={"ID":"fd4d1d88-c894-496a-b2ee-00bf80fa2415","Type":"ContainerStarted","Data":"8b84e3b52cde98cea92f145d0960b70ae2e34eb1f70dee734351f11ecd31ab55"}
Sep 30 12:37:09 crc kubenswrapper[5002]: I0930 12:37:09.302425 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-f8777b74-fpwh2" event={"ID":"fd4d1d88-c894-496a-b2ee-00bf80fa2415","Type":"ContainerStarted","Data":"20378dc396b5e4c91b822c9380bf06eb3c519550f74b233a99bb346dd6c21296"}
Sep 30 12:37:09 crc kubenswrapper[5002]: I0930 12:37:09.321540 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-66c799f4f6-jprmr" podStartSLOduration=14.011653437 podStartE2EDuration="16.321520842s" podCreationTimestamp="2025-09-30 12:36:53 +0000 UTC" firstStartedPulling="2025-09-30 12:37:05.759576306 +0000 UTC m=+1000.009258442" lastFinishedPulling="2025-09-30 12:37:08.069443701 +0000 UTC m=+1002.319125847" observedRunningTime="2025-09-30 12:37:09.305708 +0000 UTC m=+1003.555390156" watchObservedRunningTime="2025-09-30 12:37:09.321520842 +0000 UTC m=+1003.571202998"
Sep 30 12:37:09 crc kubenswrapper[5002]: I0930 12:37:09.350634 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-56d859b67f-fr8wt" podStartSLOduration=19.963310207 podStartE2EDuration="22.350614304s" podCreationTimestamp="2025-09-30 12:36:47 +0000 UTC" firstStartedPulling="2025-09-30 12:37:05.678436609 +0000 UTC m=+999.928118765" lastFinishedPulling="2025-09-30 12:37:08.065740726 +0000 UTC m=+1002.315422862" observedRunningTime="2025-09-30 12:37:09.323978115 +0000 UTC m=+1003.573660281" watchObservedRunningTime="2025-09-30 12:37:09.350614304 +0000 UTC m=+1003.600296450"
Sep 30 12:37:09 crc kubenswrapper[5002]: I0930 12:37:09.356677 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-f8777b74-fpwh2" podStartSLOduration=13.994450569 podStartE2EDuration="16.356653477s" podCreationTimestamp="2025-09-30 12:36:53 +0000 UTC" firstStartedPulling="2025-09-30 12:37:05.707215142 +0000 UTC m=+999.956897288" lastFinishedPulling="2025-09-30 12:37:08.06941805 +0000 UTC m=+1002.319100196" observedRunningTime="2025-09-30 12:37:09.345814172 +0000 UTC m=+1003.595496318" watchObservedRunningTime="2025-09-30 12:37:09.356653477 +0000 UTC m=+1003.606335623"
Sep 30 12:37:09 crc kubenswrapper[5002]: I0930 12:37:09.378603 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=4.378584826 podStartE2EDuration="4.378584826s" podCreationTimestamp="2025-09-30 12:37:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 12:37:09.364685082 +0000 UTC m=+1003.614367228" watchObservedRunningTime="2025-09-30 12:37:09.378584826 +0000 UTC m=+1003.628266972"
Sep 30 12:37:09 crc kubenswrapper[5002]: I0930 12:37:09.783456 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-c8b1-account-create-tkp6v"
Sep 30 12:37:09 crc kubenswrapper[5002]: I0930 12:37:09.852977 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-b9w76\" (UniqueName: \"kubernetes.io/projected/397fcd72-2dc0-4de0-9a5d-69fc3b14640a-kube-api-access-b9w76\") pod \"397fcd72-2dc0-4de0-9a5d-69fc3b14640a\" (UID: \"397fcd72-2dc0-4de0-9a5d-69fc3b14640a\") "
Sep 30 12:37:09 crc kubenswrapper[5002]: I0930 12:37:09.865811 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/397fcd72-2dc0-4de0-9a5d-69fc3b14640a-kube-api-access-b9w76" (OuterVolumeSpecName: "kube-api-access-b9w76") pod "397fcd72-2dc0-4de0-9a5d-69fc3b14640a" (UID: "397fcd72-2dc0-4de0-9a5d-69fc3b14640a"). InnerVolumeSpecName "kube-api-access-b9w76". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 12:37:09 crc kubenswrapper[5002]: I0930 12:37:09.893530 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-0907-account-create-r5dxh"
Sep 30 12:37:09 crc kubenswrapper[5002]: I0930 12:37:09.893983 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-dcab-account-create-mxdfk"
Sep 30 12:37:09 crc kubenswrapper[5002]: I0930 12:37:09.954191 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l5xtk\" (UniqueName: \"kubernetes.io/projected/67948d65-c149-45be-914e-30dc00325da6-kube-api-access-l5xtk\") pod \"67948d65-c149-45be-914e-30dc00325da6\" (UID: \"67948d65-c149-45be-914e-30dc00325da6\") "
Sep 30 12:37:09 crc kubenswrapper[5002]: I0930 12:37:09.954360 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fmrl6\" (UniqueName: \"kubernetes.io/projected/153e0644-8b78-4c21-9377-a0d4c7cf0848-kube-api-access-fmrl6\") pod \"153e0644-8b78-4c21-9377-a0d4c7cf0848\" (UID: \"153e0644-8b78-4c21-9377-a0d4c7cf0848\") "
Sep 30 12:37:09 crc kubenswrapper[5002]: I0930 12:37:09.954736 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-b9w76\" (UniqueName: \"kubernetes.io/projected/397fcd72-2dc0-4de0-9a5d-69fc3b14640a-kube-api-access-b9w76\") on node \"crc\" DevicePath \"\""
Sep 30 12:37:09 crc kubenswrapper[5002]: I0930 12:37:09.958755 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/153e0644-8b78-4c21-9377-a0d4c7cf0848-kube-api-access-fmrl6" (OuterVolumeSpecName: "kube-api-access-fmrl6") pod "153e0644-8b78-4c21-9377-a0d4c7cf0848" (UID: "153e0644-8b78-4c21-9377-a0d4c7cf0848"). InnerVolumeSpecName "kube-api-access-fmrl6". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 12:37:09 crc kubenswrapper[5002]: I0930 12:37:09.960593 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/67948d65-c149-45be-914e-30dc00325da6-kube-api-access-l5xtk" (OuterVolumeSpecName: "kube-api-access-l5xtk") pod "67948d65-c149-45be-914e-30dc00325da6" (UID: "67948d65-c149-45be-914e-30dc00325da6"). InnerVolumeSpecName "kube-api-access-l5xtk". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 12:37:10 crc kubenswrapper[5002]: I0930 12:37:10.055768 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l5xtk\" (UniqueName: \"kubernetes.io/projected/67948d65-c149-45be-914e-30dc00325da6-kube-api-access-l5xtk\") on node \"crc\" DevicePath \"\""
Sep 30 12:37:10 crc kubenswrapper[5002]: I0930 12:37:10.055806 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fmrl6\" (UniqueName: \"kubernetes.io/projected/153e0644-8b78-4c21-9377-a0d4c7cf0848-kube-api-access-fmrl6\") on node \"crc\" DevicePath \"\""
Sep 30 12:37:10 crc kubenswrapper[5002]: I0930 12:37:10.313434 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-0907-account-create-r5dxh" event={"ID":"153e0644-8b78-4c21-9377-a0d4c7cf0848","Type":"ContainerDied","Data":"fe3ea557c96c9ae8904086f50659f5ab27f89db887faed7743cb323bca9b2620"}
Sep 30 12:37:10 crc kubenswrapper[5002]: I0930 12:37:10.313487 5002 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="fe3ea557c96c9ae8904086f50659f5ab27f89db887faed7743cb323bca9b2620"
Sep 30 12:37:10 crc kubenswrapper[5002]: I0930 12:37:10.313518 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-0907-account-create-r5dxh"
Sep 30 12:37:10 crc kubenswrapper[5002]: I0930 12:37:10.315356 5002 generic.go:334] "Generic (PLEG): container finished" podID="418c45be-fdf8-41be-899d-a75e1ab0acef" containerID="c1a881029eb753f5bb8fdb3b059612db62c1dec044feab98e868aa941e59a615" exitCode=0
Sep 30 12:37:10 crc kubenswrapper[5002]: I0930 12:37:10.315423 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-88rff" event={"ID":"418c45be-fdf8-41be-899d-a75e1ab0acef","Type":"ContainerDied","Data":"c1a881029eb753f5bb8fdb3b059612db62c1dec044feab98e868aa941e59a615"}
Sep 30 12:37:10 crc kubenswrapper[5002]: I0930 12:37:10.316901 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-dcab-account-create-mxdfk"
Sep 30 12:37:10 crc kubenswrapper[5002]: I0930 12:37:10.316900 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-dcab-account-create-mxdfk" event={"ID":"67948d65-c149-45be-914e-30dc00325da6","Type":"ContainerDied","Data":"0982733f7a6e1efc9625ba7e6d99fb7c95af8ca67ab92697e4f91a0aa56a2235"}
Sep 30 12:37:10 crc kubenswrapper[5002]: I0930 12:37:10.317029 5002 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0982733f7a6e1efc9625ba7e6d99fb7c95af8ca67ab92697e4f91a0aa56a2235"
Sep 30 12:37:10 crc kubenswrapper[5002]: I0930 12:37:10.319207 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-c8b1-account-create-tkp6v" event={"ID":"397fcd72-2dc0-4de0-9a5d-69fc3b14640a","Type":"ContainerDied","Data":"ed122d25344570bfe55d23f9dc0ffa9c5562002727ba70a1b1537fb488f6fcb6"}
Sep 30 12:37:10 crc kubenswrapper[5002]: I0930 12:37:10.319241 5002 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ed122d25344570bfe55d23f9dc0ffa9c5562002727ba70a1b1537fb488f6fcb6"
Sep 30 12:37:10 crc kubenswrapper[5002]: I0930 12:37:10.319340 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-c8b1-account-create-tkp6v"
Sep 30 12:37:12 crc kubenswrapper[5002]: I0930 12:37:12.055940 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-sync-mvczr"]
Sep 30 12:37:12 crc kubenswrapper[5002]: E0930 12:37:12.056520 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="397fcd72-2dc0-4de0-9a5d-69fc3b14640a" containerName="mariadb-account-create"
Sep 30 12:37:12 crc kubenswrapper[5002]: I0930 12:37:12.056532 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="397fcd72-2dc0-4de0-9a5d-69fc3b14640a" containerName="mariadb-account-create"
Sep 30 12:37:12 crc kubenswrapper[5002]: E0930 12:37:12.056548 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="67948d65-c149-45be-914e-30dc00325da6" containerName="mariadb-account-create"
Sep 30 12:37:12 crc kubenswrapper[5002]: I0930 12:37:12.056554 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="67948d65-c149-45be-914e-30dc00325da6" containerName="mariadb-account-create"
Sep 30 12:37:12 crc kubenswrapper[5002]: E0930 12:37:12.056588 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="153e0644-8b78-4c21-9377-a0d4c7cf0848" containerName="mariadb-account-create"
Sep 30 12:37:12 crc kubenswrapper[5002]: I0930 12:37:12.056594 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="153e0644-8b78-4c21-9377-a0d4c7cf0848" containerName="mariadb-account-create"
Sep 30 12:37:12 crc kubenswrapper[5002]: I0930 12:37:12.056744 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="67948d65-c149-45be-914e-30dc00325da6" containerName="mariadb-account-create"
Sep 30 12:37:12 crc kubenswrapper[5002]: I0930 12:37:12.056771 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="153e0644-8b78-4c21-9377-a0d4c7cf0848" containerName="mariadb-account-create"
Sep 30 12:37:12 crc kubenswrapper[5002]: I0930 12:37:12.056781 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="397fcd72-2dc0-4de0-9a5d-69fc3b14640a" containerName="mariadb-account-create"
Sep 30 12:37:12 crc kubenswrapper[5002]: I0930 12:37:12.057335 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-mvczr"
Sep 30 12:37:12 crc kubenswrapper[5002]: I0930 12:37:12.064753 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-dzvks"
Sep 30 12:37:12 crc kubenswrapper[5002]: I0930 12:37:12.064951 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data"
Sep 30 12:37:12 crc kubenswrapper[5002]: I0930 12:37:12.069705 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-mvczr"]
Sep 30 12:37:12 crc kubenswrapper[5002]: I0930 12:37:12.106747 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/8687f1f5-15da-479c-8661-948b437fcb33-db-sync-config-data\") pod \"barbican-db-sync-mvczr\" (UID: \"8687f1f5-15da-479c-8661-948b437fcb33\") " pod="openstack/barbican-db-sync-mvczr"
Sep 30 12:37:12 crc kubenswrapper[5002]: I0930 12:37:12.106790 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8687f1f5-15da-479c-8661-948b437fcb33-combined-ca-bundle\") pod \"barbican-db-sync-mvczr\" (UID: \"8687f1f5-15da-479c-8661-948b437fcb33\") " pod="openstack/barbican-db-sync-mvczr"
Sep 30 12:37:12 crc kubenswrapper[5002]: I0930 12:37:12.106865 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-llnzh\" (UniqueName: \"kubernetes.io/projected/8687f1f5-15da-479c-8661-948b437fcb33-kube-api-access-llnzh\") pod \"barbican-db-sync-mvczr\" (UID: \"8687f1f5-15da-479c-8661-948b437fcb33\") " pod="openstack/barbican-db-sync-mvczr"
Sep 30 12:37:12 crc kubenswrapper[5002]: I0930 12:37:12.200734 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-sync-vxpxc"]
Sep 30 12:37:12 crc kubenswrapper[5002]: I0930 12:37:12.201756 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-vxpxc"
Sep 30 12:37:12 crc kubenswrapper[5002]: I0930 12:37:12.203965 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts"
Sep 30 12:37:12 crc kubenswrapper[5002]: I0930 12:37:12.204000 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-88hs7"
Sep 30 12:37:12 crc kubenswrapper[5002]: I0930 12:37:12.206121 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data"
Sep 30 12:37:12 crc kubenswrapper[5002]: I0930 12:37:12.208637 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/8687f1f5-15da-479c-8661-948b437fcb33-db-sync-config-data\") pod \"barbican-db-sync-mvczr\" (UID: \"8687f1f5-15da-479c-8661-948b437fcb33\") " pod="openstack/barbican-db-sync-mvczr"
Sep 30 12:37:12 crc kubenswrapper[5002]: I0930 12:37:12.208685 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8687f1f5-15da-479c-8661-948b437fcb33-combined-ca-bundle\") pod \"barbican-db-sync-mvczr\" (UID: \"8687f1f5-15da-479c-8661-948b437fcb33\") " pod="openstack/barbican-db-sync-mvczr"
Sep 30 12:37:12 crc kubenswrapper[5002]: I0930 12:37:12.208793 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-llnzh\" (UniqueName: \"kubernetes.io/projected/8687f1f5-15da-479c-8661-948b437fcb33-kube-api-access-llnzh\") pod \"barbican-db-sync-mvczr\" (UID: \"8687f1f5-15da-479c-8661-948b437fcb33\") " pod="openstack/barbican-db-sync-mvczr"
Sep 30 12:37:12 crc kubenswrapper[5002]: I0930 12:37:12.212518 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-vxpxc"]
Sep 30 12:37:12 crc kubenswrapper[5002]: I0930 12:37:12.216227 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/8687f1f5-15da-479c-8661-948b437fcb33-db-sync-config-data\") pod \"barbican-db-sync-mvczr\" (UID: \"8687f1f5-15da-479c-8661-948b437fcb33\") " pod="openstack/barbican-db-sync-mvczr"
Sep 30 12:37:12 crc kubenswrapper[5002]: I0930 12:37:12.231227 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-llnzh\" (UniqueName: \"kubernetes.io/projected/8687f1f5-15da-479c-8661-948b437fcb33-kube-api-access-llnzh\") pod \"barbican-db-sync-mvczr\" (UID: \"8687f1f5-15da-479c-8661-948b437fcb33\") " pod="openstack/barbican-db-sync-mvczr"
Sep 30 12:37:12 crc kubenswrapper[5002]: I0930 12:37:12.243272 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8687f1f5-15da-479c-8661-948b437fcb33-combined-ca-bundle\") pod \"barbican-db-sync-mvczr\" (UID: \"8687f1f5-15da-479c-8661-948b437fcb33\") " pod="openstack/barbican-db-sync-mvczr"
Sep 30 12:37:12 crc kubenswrapper[5002]: I0930 12:37:12.310318 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/6c398da4-8e97-4ee7-83bb-f958c41fabff-db-sync-config-data\") pod \"cinder-db-sync-vxpxc\" (UID: \"6c398da4-8e97-4ee7-83bb-f958c41fabff\") " pod="openstack/cinder-db-sync-vxpxc"
Sep 30 12:37:12 crc kubenswrapper[5002]: I0930 12:37:12.310578 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6c398da4-8e97-4ee7-83bb-f958c41fabff-config-data\") pod \"cinder-db-sync-vxpxc\" (UID: \"6c398da4-8e97-4ee7-83bb-f958c41fabff\") " pod="openstack/cinder-db-sync-vxpxc"
Sep 30 12:37:12 crc kubenswrapper[5002]: I0930 12:37:12.310803 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6c398da4-8e97-4ee7-83bb-f958c41fabff-scripts\") pod \"cinder-db-sync-vxpxc\" (UID: \"6c398da4-8e97-4ee7-83bb-f958c41fabff\") " pod="openstack/cinder-db-sync-vxpxc"
Sep 30 12:37:12 crc kubenswrapper[5002]: I0930 12:37:12.310985 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6c398da4-8e97-4ee7-83bb-f958c41fabff-combined-ca-bundle\") pod \"cinder-db-sync-vxpxc\" (UID: \"6c398da4-8e97-4ee7-83bb-f958c41fabff\") " pod="openstack/cinder-db-sync-vxpxc"
Sep 30 12:37:12 crc kubenswrapper[5002]: I0930 12:37:12.311043 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/6c398da4-8e97-4ee7-83bb-f958c41fabff-etc-machine-id\") pod \"cinder-db-sync-vxpxc\" (UID: \"6c398da4-8e97-4ee7-83bb-f958c41fabff\") " pod="openstack/cinder-db-sync-vxpxc"
Sep 30 12:37:12 crc kubenswrapper[5002]: I0930 12:37:12.311112 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fjgs2\" (UniqueName: \"kubernetes.io/projected/6c398da4-8e97-4ee7-83bb-f958c41fabff-kube-api-access-fjgs2\") pod \"cinder-db-sync-vxpxc\" (UID: \"6c398da4-8e97-4ee7-83bb-f958c41fabff\") " pod="openstack/cinder-db-sync-vxpxc"
Sep 30 12:37:12 crc kubenswrapper[5002]: I0930 12:37:12.382541 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-mvczr"
Sep 30 12:37:12 crc kubenswrapper[5002]: I0930 12:37:12.412985 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6c398da4-8e97-4ee7-83bb-f958c41fabff-combined-ca-bundle\") pod \"cinder-db-sync-vxpxc\" (UID: \"6c398da4-8e97-4ee7-83bb-f958c41fabff\") " pod="openstack/cinder-db-sync-vxpxc"
Sep 30 12:37:12 crc kubenswrapper[5002]: I0930 12:37:12.413316 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/6c398da4-8e97-4ee7-83bb-f958c41fabff-etc-machine-id\") pod \"cinder-db-sync-vxpxc\" (UID: \"6c398da4-8e97-4ee7-83bb-f958c41fabff\") " pod="openstack/cinder-db-sync-vxpxc"
Sep 30 12:37:12 crc kubenswrapper[5002]: I0930 12:37:12.413513 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fjgs2\" (UniqueName: \"kubernetes.io/projected/6c398da4-8e97-4ee7-83bb-f958c41fabff-kube-api-access-fjgs2\") pod \"cinder-db-sync-vxpxc\" (UID: \"6c398da4-8e97-4ee7-83bb-f958c41fabff\") " pod="openstack/cinder-db-sync-vxpxc"
Sep 30 12:37:12 crc kubenswrapper[5002]: I0930 12:37:12.413670 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/6c398da4-8e97-4ee7-83bb-f958c41fabff-db-sync-config-data\") pod \"cinder-db-sync-vxpxc\" (UID: \"6c398da4-8e97-4ee7-83bb-f958c41fabff\") " pod="openstack/cinder-db-sync-vxpxc"
Sep 30 12:37:12 crc kubenswrapper[5002]: I0930 12:37:12.413544 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/6c398da4-8e97-4ee7-83bb-f958c41fabff-etc-machine-id\") pod \"cinder-db-sync-vxpxc\" (UID: \"6c398da4-8e97-4ee7-83bb-f958c41fabff\") " pod="openstack/cinder-db-sync-vxpxc"
Sep 30 12:37:12 crc kubenswrapper[5002]: I0930 12:37:12.413877 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6c398da4-8e97-4ee7-83bb-f958c41fabff-config-data\") pod \"cinder-db-sync-vxpxc\" (UID: \"6c398da4-8e97-4ee7-83bb-f958c41fabff\") " pod="openstack/cinder-db-sync-vxpxc"
Sep 30 12:37:12 crc kubenswrapper[5002]: I0930 12:37:12.414191 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6c398da4-8e97-4ee7-83bb-f958c41fabff-scripts\") pod \"cinder-db-sync-vxpxc\" (UID: \"6c398da4-8e97-4ee7-83bb-f958c41fabff\") " pod="openstack/cinder-db-sync-vxpxc"
Sep 30 12:37:12 crc kubenswrapper[5002]: I0930 12:37:12.435380 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6c398da4-8e97-4ee7-83bb-f958c41fabff-scripts\") pod \"cinder-db-sync-vxpxc\" (UID: \"6c398da4-8e97-4ee7-83bb-f958c41fabff\") " pod="openstack/cinder-db-sync-vxpxc"
Sep 30 12:37:12 crc kubenswrapper[5002]: I0930 12:37:12.436637 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6c398da4-8e97-4ee7-83bb-f958c41fabff-combined-ca-bundle\") pod \"cinder-db-sync-vxpxc\" (UID: \"6c398da4-8e97-4ee7-83bb-f958c41fabff\") " pod="openstack/cinder-db-sync-vxpxc"
Sep 30 12:37:12 crc kubenswrapper[5002]: I0930 12:37:12.437198 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/6c398da4-8e97-4ee7-83bb-f958c41fabff-db-sync-config-data\") pod \"cinder-db-sync-vxpxc\" (UID: \"6c398da4-8e97-4ee7-83bb-f958c41fabff\") " pod="openstack/cinder-db-sync-vxpxc"
Sep 30 12:37:12 crc kubenswrapper[5002]: I0930 12:37:12.438008 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fjgs2\" (UniqueName: \"kubernetes.io/projected/6c398da4-8e97-4ee7-83bb-f958c41fabff-kube-api-access-fjgs2\") pod \"cinder-db-sync-vxpxc\" (UID: \"6c398da4-8e97-4ee7-83bb-f958c41fabff\") " pod="openstack/cinder-db-sync-vxpxc"
Sep 30 12:37:12 crc kubenswrapper[5002]: I0930 12:37:12.443576 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6c398da4-8e97-4ee7-83bb-f958c41fabff-config-data\") pod \"cinder-db-sync-vxpxc\" (UID: \"6c398da4-8e97-4ee7-83bb-f958c41fabff\") " pod="openstack/cinder-db-sync-vxpxc"
Sep 30 12:37:12 crc kubenswrapper[5002]: I0930 12:37:12.444071 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-sync-qwvj9"]
Sep 30 12:37:12 crc kubenswrapper[5002]: I0930 12:37:12.445767 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-qwvj9"
Sep 30 12:37:12 crc kubenswrapper[5002]: I0930 12:37:12.450342 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-8dlzq"
Sep 30 12:37:12 crc kubenswrapper[5002]: I0930 12:37:12.450589 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config"
Sep 30 12:37:12 crc kubenswrapper[5002]: I0930 12:37:12.452027 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config"
Sep 30 12:37:12 crc kubenswrapper[5002]: I0930 12:37:12.465039 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-qwvj9"]
Sep 30 12:37:12 crc kubenswrapper[5002]: I0930 12:37:12.516584 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/d52650cc-2190-4b6f-8a3f-f506262075d8-config\") pod \"neutron-db-sync-qwvj9\" (UID: \"d52650cc-2190-4b6f-8a3f-f506262075d8\") " pod="openstack/neutron-db-sync-qwvj9"
Sep 30 12:37:12 crc kubenswrapper[5002]: I0930 12:37:12.516652 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d52650cc-2190-4b6f-8a3f-f506262075d8-combined-ca-bundle\") pod \"neutron-db-sync-qwvj9\" (UID: \"d52650cc-2190-4b6f-8a3f-f506262075d8\") " pod="openstack/neutron-db-sync-qwvj9"
Sep 30 12:37:12 crc kubenswrapper[5002]: I0930 12:37:12.516791 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-42xst\" (UniqueName: \"kubernetes.io/projected/d52650cc-2190-4b6f-8a3f-f506262075d8-kube-api-access-42xst\") pod \"neutron-db-sync-qwvj9\" (UID: \"d52650cc-2190-4b6f-8a3f-f506262075d8\") " pod="openstack/neutron-db-sync-qwvj9"
Sep 30 12:37:12 crc kubenswrapper[5002]: I0930 12:37:12.599552 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-vxpxc"
Sep 30 12:37:12 crc kubenswrapper[5002]: I0930 12:37:12.618433 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/d52650cc-2190-4b6f-8a3f-f506262075d8-config\") pod \"neutron-db-sync-qwvj9\" (UID: \"d52650cc-2190-4b6f-8a3f-f506262075d8\") " pod="openstack/neutron-db-sync-qwvj9"
Sep 30 12:37:12 crc kubenswrapper[5002]: I0930 12:37:12.618798 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d52650cc-2190-4b6f-8a3f-f506262075d8-combined-ca-bundle\") pod \"neutron-db-sync-qwvj9\" (UID: \"d52650cc-2190-4b6f-8a3f-f506262075d8\") " pod="openstack/neutron-db-sync-qwvj9"
Sep 30 12:37:12 crc kubenswrapper[5002]: I0930 12:37:12.619417 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-42xst\" (UniqueName: \"kubernetes.io/projected/d52650cc-2190-4b6f-8a3f-f506262075d8-kube-api-access-42xst\") pod \"neutron-db-sync-qwvj9\" (UID: \"d52650cc-2190-4b6f-8a3f-f506262075d8\") " pod="openstack/neutron-db-sync-qwvj9"
Sep 30 12:37:12 crc kubenswrapper[5002]: I0930 12:37:12.623646 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/d52650cc-2190-4b6f-8a3f-f506262075d8-config\") pod \"neutron-db-sync-qwvj9\" (UID: \"d52650cc-2190-4b6f-8a3f-f506262075d8\") " pod="openstack/neutron-db-sync-qwvj9"
Sep 30 12:37:12 crc kubenswrapper[5002]: I0930 12:37:12.633367 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d52650cc-2190-4b6f-8a3f-f506262075d8-combined-ca-bundle\") pod \"neutron-db-sync-qwvj9\" (UID: \"d52650cc-2190-4b6f-8a3f-f506262075d8\") " pod="openstack/neutron-db-sync-qwvj9"
Sep 30 12:37:12 crc kubenswrapper[5002]: I0930 12:37:12.642428 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-42xst\" (UniqueName: \"kubernetes.io/projected/d52650cc-2190-4b6f-8a3f-f506262075d8-kube-api-access-42xst\") pod \"neutron-db-sync-qwvj9\" (UID: \"d52650cc-2190-4b6f-8a3f-f506262075d8\") " pod="openstack/neutron-db-sync-qwvj9"
Sep 30 12:37:12 crc kubenswrapper[5002]: I0930 12:37:12.732435 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-88rff"
Sep 30 12:37:12 crc kubenswrapper[5002]: I0930 12:37:12.811115 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-qwvj9"
Sep 30 12:37:12 crc kubenswrapper[5002]: I0930 12:37:12.822964 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/418c45be-fdf8-41be-899d-a75e1ab0acef-fernet-keys\") pod \"418c45be-fdf8-41be-899d-a75e1ab0acef\" (UID: \"418c45be-fdf8-41be-899d-a75e1ab0acef\") "
Sep 30 12:37:12 crc kubenswrapper[5002]: I0930 12:37:12.823332 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/418c45be-fdf8-41be-899d-a75e1ab0acef-config-data\") pod \"418c45be-fdf8-41be-899d-a75e1ab0acef\" (UID: \"418c45be-fdf8-41be-899d-a75e1ab0acef\") "
Sep 30 12:37:12 crc kubenswrapper[5002]: I0930 12:37:12.823363 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7xznn\" (UniqueName: \"kubernetes.io/projected/418c45be-fdf8-41be-899d-a75e1ab0acef-kube-api-access-7xznn\") pod \"418c45be-fdf8-41be-899d-a75e1ab0acef\" (UID: \"418c45be-fdf8-41be-899d-a75e1ab0acef\") "
Sep 30 12:37:12 crc kubenswrapper[5002]: I0930 12:37:12.824426 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/418c45be-fdf8-41be-899d-a75e1ab0acef-combined-ca-bundle\") pod \"418c45be-fdf8-41be-899d-a75e1ab0acef\" (UID: \"418c45be-fdf8-41be-899d-a75e1ab0acef\") "
Sep 30 12:37:12 crc kubenswrapper[5002]: I0930 12:37:12.824462 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/418c45be-fdf8-41be-899d-a75e1ab0acef-scripts\") pod \"418c45be-fdf8-41be-899d-a75e1ab0acef\" (UID: \"418c45be-fdf8-41be-899d-a75e1ab0acef\") "
Sep 30 12:37:12 crc kubenswrapper[5002]: I0930 12:37:12.824571 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/418c45be-fdf8-41be-899d-a75e1ab0acef-credential-keys\") pod \"418c45be-fdf8-41be-899d-a75e1ab0acef\" (UID: \"418c45be-fdf8-41be-899d-a75e1ab0acef\") "
Sep 30 12:37:12 crc kubenswrapper[5002]: I0930 12:37:12.829227 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/418c45be-fdf8-41be-899d-a75e1ab0acef-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "418c45be-fdf8-41be-899d-a75e1ab0acef" (UID: "418c45be-fdf8-41be-899d-a75e1ab0acef"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 12:37:12 crc kubenswrapper[5002]: I0930 12:37:12.829298 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/418c45be-fdf8-41be-899d-a75e1ab0acef-kube-api-access-7xznn" (OuterVolumeSpecName: "kube-api-access-7xznn") pod "418c45be-fdf8-41be-899d-a75e1ab0acef" (UID: "418c45be-fdf8-41be-899d-a75e1ab0acef"). InnerVolumeSpecName "kube-api-access-7xznn". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 12:37:12 crc kubenswrapper[5002]: I0930 12:37:12.830468 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/418c45be-fdf8-41be-899d-a75e1ab0acef-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "418c45be-fdf8-41be-899d-a75e1ab0acef" (UID: "418c45be-fdf8-41be-899d-a75e1ab0acef"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 12:37:12 crc kubenswrapper[5002]: I0930 12:37:12.830925 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/418c45be-fdf8-41be-899d-a75e1ab0acef-scripts" (OuterVolumeSpecName: "scripts") pod "418c45be-fdf8-41be-899d-a75e1ab0acef" (UID: "418c45be-fdf8-41be-899d-a75e1ab0acef"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 12:37:12 crc kubenswrapper[5002]: I0930 12:37:12.854597 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/418c45be-fdf8-41be-899d-a75e1ab0acef-config-data" (OuterVolumeSpecName: "config-data") pod "418c45be-fdf8-41be-899d-a75e1ab0acef" (UID: "418c45be-fdf8-41be-899d-a75e1ab0acef"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 12:37:12 crc kubenswrapper[5002]: I0930 12:37:12.864364 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/418c45be-fdf8-41be-899d-a75e1ab0acef-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "418c45be-fdf8-41be-899d-a75e1ab0acef" (UID: "418c45be-fdf8-41be-899d-a75e1ab0acef"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 12:37:12 crc kubenswrapper[5002]: I0930 12:37:12.926991 5002 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/418c45be-fdf8-41be-899d-a75e1ab0acef-credential-keys\") on node \"crc\" DevicePath \"\""
Sep 30 12:37:12 crc kubenswrapper[5002]: I0930 12:37:12.927033 5002 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/418c45be-fdf8-41be-899d-a75e1ab0acef-fernet-keys\") on node \"crc\" DevicePath \"\""
Sep 30 12:37:12 crc kubenswrapper[5002]: I0930 12:37:12.927043 5002 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/418c45be-fdf8-41be-899d-a75e1ab0acef-config-data\") on node \"crc\" DevicePath \"\""
Sep 30 12:37:12 crc kubenswrapper[5002]: I0930 12:37:12.927057 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7xznn\" (UniqueName: \"kubernetes.io/projected/418c45be-fdf8-41be-899d-a75e1ab0acef-kube-api-access-7xznn\") on node \"crc\" DevicePath \"\""
Sep 30 12:37:12 crc kubenswrapper[5002]: I0930 12:37:12.927070 5002 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/418c45be-fdf8-41be-899d-a75e1ab0acef-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Sep 30 12:37:12 crc kubenswrapper[5002]: I0930 12:37:12.927080 5002 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/418c45be-fdf8-41be-899d-a75e1ab0acef-scripts\") on node \"crc\" DevicePath \"\""
Sep 30 12:37:13 crc kubenswrapper[5002]: I0930 12:37:13.351633 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-88rff" event={"ID":"418c45be-fdf8-41be-899d-a75e1ab0acef","Type":"ContainerDied","Data":"101beab084b6026dd5988ef7154ae14b81c2dfd297b6030cf1513e41ae7d265a"}
Sep 30 12:37:13 crc kubenswrapper[5002]: I0930 12:37:13.351663 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-88rff"
Sep 30 12:37:13 crc kubenswrapper[5002]: I0930 12:37:13.351682 5002 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="101beab084b6026dd5988ef7154ae14b81c2dfd297b6030cf1513e41ae7d265a"
Sep 30 12:37:13 crc kubenswrapper[5002]: I0930 12:37:13.820155 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-84489c98f8-p85zv"]
Sep 30 12:37:13 crc kubenswrapper[5002]: E0930 12:37:13.821037 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="418c45be-fdf8-41be-899d-a75e1ab0acef" containerName="keystone-bootstrap"
Sep 30 12:37:13 crc kubenswrapper[5002]: I0930 12:37:13.821109 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="418c45be-fdf8-41be-899d-a75e1ab0acef" containerName="keystone-bootstrap"
Sep 30 12:37:13 crc kubenswrapper[5002]: I0930 12:37:13.821347 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="418c45be-fdf8-41be-899d-a75e1ab0acef" containerName="keystone-bootstrap"
Sep 30 12:37:13 crc kubenswrapper[5002]: I0930 12:37:13.821957 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-84489c98f8-p85zv"
Sep 30 12:37:13 crc kubenswrapper[5002]: I0930 12:37:13.825085 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-keystone-public-svc"
Sep 30 12:37:13 crc kubenswrapper[5002]: I0930 12:37:13.825091 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone"
Sep 30 12:37:13 crc kubenswrapper[5002]: I0930 12:37:13.825174 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts"
Sep 30 12:37:13 crc kubenswrapper[5002]: I0930 12:37:13.825264 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-n9gzw"
Sep 30 12:37:13 crc kubenswrapper[5002]: I0930 12:37:13.825328 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-keystone-internal-svc"
Sep 30 12:37:13 crc kubenswrapper[5002]: I0930 12:37:13.825961 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data"
Sep 30 12:37:13 crc kubenswrapper[5002]: I0930 12:37:13.870813 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-84489c98f8-p85zv"]
Sep 30 12:37:13 crc kubenswrapper[5002]: I0930 12:37:13.882660 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-f8777b74-fpwh2"
Sep 30 12:37:13 crc kubenswrapper[5002]: I0930 12:37:13.882719 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/horizon-f8777b74-fpwh2"
Sep 30 12:37:13 crc kubenswrapper[5002]: I0930 12:37:13.947440 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4jltk\" (UniqueName: \"kubernetes.io/projected/1c943245-6e36-4812-9694-48a5c2747a90-kube-api-access-4jltk\") pod \"keystone-84489c98f8-p85zv\" (UID: \"1c943245-6e36-4812-9694-48a5c2747a90\") " pod="openstack/keystone-84489c98f8-p85zv"
Sep 30 12:37:13 crc kubenswrapper[5002]: I0930 12:37:13.947498 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/1c943245-6e36-4812-9694-48a5c2747a90-internal-tls-certs\") pod \"keystone-84489c98f8-p85zv\" (UID: \"1c943245-6e36-4812-9694-48a5c2747a90\") " pod="openstack/keystone-84489c98f8-p85zv"
Sep 30 12:37:13 crc kubenswrapper[5002]: I0930 12:37:13.947711 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1c943245-6e36-4812-9694-48a5c2747a90-scripts\") pod \"keystone-84489c98f8-p85zv\" (UID: \"1c943245-6e36-4812-9694-48a5c2747a90\") " pod="openstack/keystone-84489c98f8-p85zv"
Sep 30 12:37:13 crc kubenswrapper[5002]: I0930 12:37:13.947807 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/1c943245-6e36-4812-9694-48a5c2747a90-fernet-keys\") pod \"keystone-84489c98f8-p85zv\" (UID: \"1c943245-6e36-4812-9694-48a5c2747a90\") " pod="openstack/keystone-84489c98f8-p85zv"
Sep 30 12:37:13 crc kubenswrapper[5002]: I0930 12:37:13.947829 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1c943245-6e36-4812-9694-48a5c2747a90-combined-ca-bundle\") pod \"keystone-84489c98f8-p85zv\" (UID: \"1c943245-6e36-4812-9694-48a5c2747a90\") " pod="openstack/keystone-84489c98f8-p85zv"
Sep 30 12:37:13 crc kubenswrapper[5002]: I0930 12:37:13.947877 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1c943245-6e36-4812-9694-48a5c2747a90-config-data\") pod \"keystone-84489c98f8-p85zv\" (UID: \"1c943245-6e36-4812-9694-48a5c2747a90\") " pod="openstack/keystone-84489c98f8-p85zv"
Sep 30 12:37:13 crc kubenswrapper[5002]: I0930 12:37:13.947962 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/1c943245-6e36-4812-9694-48a5c2747a90-public-tls-certs\") pod \"keystone-84489c98f8-p85zv\" (UID: \"1c943245-6e36-4812-9694-48a5c2747a90\") " pod="openstack/keystone-84489c98f8-p85zv"
Sep 30 12:37:13 crc kubenswrapper[5002]: I0930 12:37:13.948021 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/1c943245-6e36-4812-9694-48a5c2747a90-credential-keys\") pod \"keystone-84489c98f8-p85zv\" (UID: \"1c943245-6e36-4812-9694-48a5c2747a90\") " pod="openstack/keystone-84489c98f8-p85zv"
Sep 30 12:37:14 crc kubenswrapper[5002]: I0930 12:37:14.023603 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/horizon-66c799f4f6-jprmr"
Sep 30 12:37:14 crc kubenswrapper[5002]: I0930 12:37:14.023674 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-66c799f4f6-jprmr"
Sep 30 12:37:14 crc kubenswrapper[5002]: I0930 12:37:14.049371 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/1c943245-6e36-4812-9694-48a5c2747a90-fernet-keys\") pod \"keystone-84489c98f8-p85zv\" (UID: \"1c943245-6e36-4812-9694-48a5c2747a90\") " pod="openstack/keystone-84489c98f8-p85zv"
Sep 30 12:37:14 crc kubenswrapper[5002]: I0930 12:37:14.049421 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1c943245-6e36-4812-9694-48a5c2747a90-combined-ca-bundle\") pod \"keystone-84489c98f8-p85zv\" (UID: \"1c943245-6e36-4812-9694-48a5c2747a90\") " pod="openstack/keystone-84489c98f8-p85zv"
Sep 30 12:37:14 crc kubenswrapper[5002]: I0930 12:37:14.049452 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1c943245-6e36-4812-9694-48a5c2747a90-config-data\") pod \"keystone-84489c98f8-p85zv\" (UID: \"1c943245-6e36-4812-9694-48a5c2747a90\") " pod="openstack/keystone-84489c98f8-p85zv"
Sep 30 12:37:14 crc kubenswrapper[5002]: I0930 12:37:14.049512 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/1c943245-6e36-4812-9694-48a5c2747a90-public-tls-certs\") pod \"keystone-84489c98f8-p85zv\" (UID: \"1c943245-6e36-4812-9694-48a5c2747a90\") " pod="openstack/keystone-84489c98f8-p85zv"
Sep 30 12:37:14 crc kubenswrapper[5002]: I0930 12:37:14.049556 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/1c943245-6e36-4812-9694-48a5c2747a90-credential-keys\") pod \"keystone-84489c98f8-p85zv\" (UID: \"1c943245-6e36-4812-9694-48a5c2747a90\") " pod="openstack/keystone-84489c98f8-p85zv"
Sep 30 12:37:14 crc kubenswrapper[5002]: I0930 12:37:14.049741 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4jltk\" (UniqueName: \"kubernetes.io/projected/1c943245-6e36-4812-9694-48a5c2747a90-kube-api-access-4jltk\") pod \"keystone-84489c98f8-p85zv\" (UID: \"1c943245-6e36-4812-9694-48a5c2747a90\") " pod="openstack/keystone-84489c98f8-p85zv"
Sep 30 12:37:14 crc kubenswrapper[5002]: I0930 12:37:14.049774 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/1c943245-6e36-4812-9694-48a5c2747a90-internal-tls-certs\") pod \"keystone-84489c98f8-p85zv\" (UID: \"1c943245-6e36-4812-9694-48a5c2747a90\") " pod="openstack/keystone-84489c98f8-p85zv"
Sep 30 12:37:14 crc kubenswrapper[5002]: I0930 12:37:14.049843 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1c943245-6e36-4812-9694-48a5c2747a90-scripts\") pod \"keystone-84489c98f8-p85zv\" (UID: \"1c943245-6e36-4812-9694-48a5c2747a90\") " pod="openstack/keystone-84489c98f8-p85zv"
Sep 30 12:37:14 crc kubenswrapper[5002]: I0930 12:37:14.054034 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1c943245-6e36-4812-9694-48a5c2747a90-scripts\") pod \"keystone-84489c98f8-p85zv\" (UID: \"1c943245-6e36-4812-9694-48a5c2747a90\") " pod="openstack/keystone-84489c98f8-p85zv"
Sep 30 12:37:14 crc kubenswrapper[5002]: I0930 12:37:14.054048 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/1c943245-6e36-4812-9694-48a5c2747a90-public-tls-certs\") pod \"keystone-84489c98f8-p85zv\" (UID: \"1c943245-6e36-4812-9694-48a5c2747a90\") " pod="openstack/keystone-84489c98f8-p85zv"
Sep 30 12:37:14 crc kubenswrapper[5002]: I0930 12:37:14.054159 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1c943245-6e36-4812-9694-48a5c2747a90-combined-ca-bundle\") pod \"keystone-84489c98f8-p85zv\" (UID: \"1c943245-6e36-4812-9694-48a5c2747a90\") " pod="openstack/keystone-84489c98f8-p85zv"
Sep 30 12:37:14 crc kubenswrapper[5002]: I0930 12:37:14.055436 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1c943245-6e36-4812-9694-48a5c2747a90-config-data\") pod \"keystone-84489c98f8-p85zv\" (UID: \"1c943245-6e36-4812-9694-48a5c2747a90\") " pod="openstack/keystone-84489c98f8-p85zv"
Sep 30 12:37:14 crc kubenswrapper[5002]: I0930 12:37:14.062016 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/1c943245-6e36-4812-9694-48a5c2747a90-fernet-keys\") pod \"keystone-84489c98f8-p85zv\" (UID: \"1c943245-6e36-4812-9694-48a5c2747a90\") " pod="openstack/keystone-84489c98f8-p85zv"
Sep 30 12:37:14 crc kubenswrapper[5002]: I0930 12:37:14.062144 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/1c943245-6e36-4812-9694-48a5c2747a90-internal-tls-certs\") pod \"keystone-84489c98f8-p85zv\" (UID: \"1c943245-6e36-4812-9694-48a5c2747a90\") " pod="openstack/keystone-84489c98f8-p85zv"
Sep 30 12:37:14 crc kubenswrapper[5002]: I0930 12:37:14.072965 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/1c943245-6e36-4812-9694-48a5c2747a90-credential-keys\") pod \"keystone-84489c98f8-p85zv\" (UID: \"1c943245-6e36-4812-9694-48a5c2747a90\") " pod="openstack/keystone-84489c98f8-p85zv"
Sep 30 12:37:14 crc kubenswrapper[5002]: I0930 12:37:14.073101 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4jltk\" (UniqueName: \"kubernetes.io/projected/1c943245-6e36-4812-9694-48a5c2747a90-kube-api-access-4jltk\") pod \"keystone-84489c98f8-p85zv\" (UID: \"1c943245-6e36-4812-9694-48a5c2747a90\") " pod="openstack/keystone-84489c98f8-p85zv"
Sep 30 12:37:14 crc kubenswrapper[5002]: I0930 12:37:14.135437 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-84489c98f8-p85zv"
Sep 30 12:37:15 crc kubenswrapper[5002]: I0930 12:37:15.605896 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0"
Sep 30 12:37:15 crc kubenswrapper[5002]: I0930 12:37:15.606654 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0"
Sep 30 12:37:15 crc kubenswrapper[5002]: I0930 12:37:15.664336 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0"
Sep 30 12:37:15 crc kubenswrapper[5002]: I0930 12:37:15.667545 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0"
Sep 30 12:37:15 crc kubenswrapper[5002]: I0930 12:37:15.672933 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0"
Sep 30 12:37:15 crc kubenswrapper[5002]: I0930 12:37:15.674603 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0"
Sep 30 12:37:15 crc kubenswrapper[5002]: I0930 12:37:15.753851 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0"
Sep 30 12:37:15 crc kubenswrapper[5002]: I0930 12:37:15.840756 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0"
Sep 30 12:37:16 crc kubenswrapper[5002]: I0930 12:37:16.067527 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-84489c98f8-p85zv"]
Sep 30 12:37:16 crc kubenswrapper[5002]: I0930 12:37:16.074987 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-mvczr"]
Sep 30 12:37:16 crc kubenswrapper[5002]: I0930 12:37:16.082028 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-vxpxc"]
Sep 30 12:37:16 crc kubenswrapper[5002]: I0930 12:37:16.228600 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-qwvj9"]
Sep 30 12:37:16 crc kubenswrapper[5002]: I0930 12:37:16.381307 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-vxpxc" event={"ID":"6c398da4-8e97-4ee7-83bb-f958c41fabff","Type":"ContainerStarted","Data":"8ba4da71cb92298baa03729a78378de20cc7ecf6c833aec55776935cb5eb16c0"}
Sep 30 12:37:16 crc kubenswrapper[5002]: I0930 12:37:16.383542 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"22178f02-1a64-4a88-a564-fe143875e7df","Type":"ContainerStarted","Data":"3a98ed24d60c32ebb7cf23132b2524ad92aff0e6c4fcc8c33abd561d0b572d78"}
Sep 30 12:37:16 crc kubenswrapper[5002]: I0930 12:37:16.384776 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-qwvj9" event={"ID":"d52650cc-2190-4b6f-8a3f-f506262075d8","Type":"ContainerStarted","Data":"9ffa9a9f49b70ab244e75af3ed34c8db009cbefcc0d031929dfd1065d975d8b9"}
Sep 30 12:37:16 crc kubenswrapper[5002]: I0930 12:37:16.386160 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-mvczr" event={"ID":"8687f1f5-15da-479c-8661-948b437fcb33","Type":"ContainerStarted","Data":"cb6d5712c0eaf26bdee35205fb05d670bdbae107d7425cf1f8998c2c05aa8af8"}
Sep 30 12:37:16 crc kubenswrapper[5002]: I0930 12:37:16.387574 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-84489c98f8-p85zv" event={"ID":"1c943245-6e36-4812-9694-48a5c2747a90","Type":"ContainerStarted","Data":"13e03c15f87c0acf99e739161a406a7c62dd9409bea366a34a4f55479a3d9faa"}
Sep 30 12:37:16 crc kubenswrapper[5002]: I0930 12:37:16.387595 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-84489c98f8-p85zv" event={"ID":"1c943245-6e36-4812-9694-48a5c2747a90","Type":"ContainerStarted","Data":"e4f5a97d74936e4e667b298285ab46dba27ad618c15523ea3bfdac90d5494195"}
Sep 30 12:37:16 crc kubenswrapper[5002]: I0930 12:37:16.388216 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0"
Sep 30 12:37:16 crc kubenswrapper[5002]: I0930 12:37:16.390764 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0"
Sep 30 12:37:16 crc kubenswrapper[5002]: I0930 12:37:16.391404 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0"
Sep 30 12:37:16 crc kubenswrapper[5002]: I0930 12:37:16.391435 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0"
Sep 30 12:37:16 crc kubenswrapper[5002]: I0930 12:37:16.422320 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-84489c98f8-p85zv" podStartSLOduration=3.422292297 podStartE2EDuration="3.422292297s" podCreationTimestamp="2025-09-30 12:37:13 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 12:37:16.407225923 +0000 UTC m=+1010.656908069" watchObservedRunningTime="2025-09-30 12:37:16.422292297 +0000 UTC m=+1010.671974473"
Sep 30 12:37:17 crc kubenswrapper[5002]: I0930 12:37:17.436611 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-qwvj9" event={"ID":"d52650cc-2190-4b6f-8a3f-f506262075d8","Type":"ContainerStarted","Data":"8a95094e63785df2c49d0117ed1d9bc8bff30ac583aa71825afe7b2e3d5ed8b6"}
Sep 30 12:37:17 crc kubenswrapper[5002]: I0930 12:37:17.437332 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/keystone-84489c98f8-p85zv"
Sep 30 12:37:17 crc kubenswrapper[5002]: I0930 12:37:17.460061 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-db-sync-qwvj9" podStartSLOduration=5.46004055 podStartE2EDuration="5.46004055s" podCreationTimestamp="2025-09-30 12:37:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 12:37:17.453079383 +0000 UTC m=+1011.702761539" watchObservedRunningTime="2025-09-30 12:37:17.46004055 +0000 UTC m=+1011.709722696"
Sep 30 12:37:17 crc kubenswrapper[5002]: I0930 12:37:17.499637 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-56d859b67f-fr8wt"
Sep 30 12:37:18 crc kubenswrapper[5002]: I0930 12:37:18.887767 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0"
Sep 30 12:37:18 crc kubenswrapper[5002]: I0930 12:37:18.888031 5002 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness"
Sep 30 12:37:19 crc kubenswrapper[5002]: I0930 12:37:19.105602 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0"
Sep 30 12:37:19 crc kubenswrapper[5002]: I0930 12:37:19.106039 5002 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness"
Sep 30 12:37:19 crc kubenswrapper[5002]: I0930 12:37:19.110957 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0"
Sep 30 12:37:19 crc kubenswrapper[5002]: I0930 12:37:19.366967 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0"
Sep 30 12:37:23 crc kubenswrapper[5002]: I0930 12:37:23.882189 5002 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-f8777b74-fpwh2" podUID="fd4d1d88-c894-496a-b2ee-00bf80fa2415" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.144:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.144:8443: connect: connection refused"
Sep 30 12:37:24 crc kubenswrapper[5002]: I0930 12:37:24.025153 5002 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-66c799f4f6-jprmr" podUID="7c12a4dd-a3df-4106-ab48-b628b89b3277" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.145:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.145:8443: connect: connection refused"
Sep 30 12:37:36 crc kubenswrapper[5002]: I0930 12:37:36.642139 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/horizon-f8777b74-fpwh2"
Sep 30 12:37:36 crc kubenswrapper[5002]: I0930 12:37:36.852100 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/horizon-66c799f4f6-jprmr"
Sep 30 12:37:38 crc kubenswrapper[5002]: E0930 12:37:38.161236 5002 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-placement-api:current-podified"
Sep 30 12:37:38 crc kubenswrapper[5002]: E0930 12:37:38.161394 5002 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:placement-db-sync,Image:quay.io/podified-antelope-centos9/openstack-placement-api:current-podified,Command:[/bin/bash],Args:[-c /usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:true,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:scripts,ReadOnly:true,MountPath:/usr/local/bin/container-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:logs,ReadOnly:false,MountPath:/var/log/placement,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:false,MountPath:/var/lib/openstack/config,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:placement-dbsync-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-9pmcw,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42482,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod placement-db-sync-bnf58_openstack(4fdf0c77-68ae-41ff-b6b5-122baa461b8c): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Sep 30 12:37:38 crc kubenswrapper[5002]: E0930 12:37:38.162548 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"placement-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/placement-db-sync-bnf58" podUID="4fdf0c77-68ae-41ff-b6b5-122baa461b8c"
Sep 30 12:37:38 crc kubenswrapper[5002]: E0930 12:37:38.215651 5002 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/ubi9/httpd-24:latest"
Sep 30 12:37:38 crc kubenswrapper[5002]: E0930 12:37:38.215890 5002 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:proxy-httpd,Image:registry.redhat.io/ubi9/httpd-24:latest,Command:[/usr/sbin/httpd],Args:[-DFOREGROUND],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:proxy-httpd,HostPort:0,ContainerPort:3000,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config-data,ReadOnly:true,MountPath:/etc/httpd/conf/httpd.conf,SubPath:httpd.conf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/etc/httpd/conf.d/ssl.conf,SubPath:ssl.conf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:run-httpd,ReadOnly:false,MountPath:/run/httpd,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:log-httpd,ReadOnly:false,MountPath:/var/log/httpd,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-v27pl,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/,Port:{0 3000 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:300,TimeoutSeconds:30,PeriodSeconds:30,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/,Port:{0 3000 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:10,TimeoutSeconds:30,PeriodSeconds:30,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*0,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ceilometer-0_openstack(22178f02-1a64-4a88-a564-fe143875e7df): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError"
Sep 30 12:37:38 crc kubenswrapper[5002]: E0930 12:37:38.217130 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"ceilometer-central-agent\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\", failed to \"StartContainer\" for \"proxy-httpd\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"]" pod="openstack/ceilometer-0" podUID="22178f02-1a64-4a88-a564-fe143875e7df"
Sep 30 12:37:38 crc kubenswrapper[5002]: I0930 12:37:38.418222 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/horizon-f8777b74-fpwh2"
Sep 30 12:37:38 crc kubenswrapper[5002]: I0930 12:37:38.660564 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="22178f02-1a64-4a88-a564-fe143875e7df" containerName="sg-core" containerID="cri-o://3a98ed24d60c32ebb7cf23132b2524ad92aff0e6c4fcc8c33abd561d0b572d78" gracePeriod=30
Sep 30 12:37:38 crc kubenswrapper[5002]: I0930 12:37:38.660708 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="22178f02-1a64-4a88-a564-fe143875e7df" containerName="ceilometer-notification-agent" containerID="cri-o://654d1adc3ddf20f5867bd8c29763dc1ed55ab3326b36f37943a8648cb889b6af" gracePeriod=30
Sep 30 12:37:38 crc kubenswrapper[5002]: I0930 12:37:38.859662 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/horizon-66c799f4f6-jprmr"
Sep 30 12:37:38 crc kubenswrapper[5002]: I0930 12:37:38.920682 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-f8777b74-fpwh2"]
Sep 30 12:37:38 crc kubenswrapper[5002]: I0930 12:37:38.920905 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-f8777b74-fpwh2" podUID="fd4d1d88-c894-496a-b2ee-00bf80fa2415" containerName="horizon-log" containerID="cri-o://20378dc396b5e4c91b822c9380bf06eb3c519550f74b233a99bb346dd6c21296" gracePeriod=30
Sep 30 12:37:38 crc kubenswrapper[5002]: I0930 12:37:38.921365 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-f8777b74-fpwh2" podUID="fd4d1d88-c894-496a-b2ee-00bf80fa2415" containerName="horizon" containerID="cri-o://8b84e3b52cde98cea92f145d0960b70ae2e34eb1f70dee734351f11ecd31ab55" gracePeriod=30
Sep 30 12:37:39 crc kubenswrapper[5002]: I0930 12:37:39.670845 5002 generic.go:334] "Generic (PLEG): container finished" podID="22178f02-1a64-4a88-a564-fe143875e7df" containerID="3a98ed24d60c32ebb7cf23132b2524ad92aff0e6c4fcc8c33abd561d0b572d78" exitCode=2
Sep 30 12:37:39 crc kubenswrapper[5002]: I0930 12:37:39.670905 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"22178f02-1a64-4a88-a564-fe143875e7df","Type":"ContainerDied","Data":"3a98ed24d60c32ebb7cf23132b2524ad92aff0e6c4fcc8c33abd561d0b572d78"}
Sep 30 12:37:39 crc kubenswrapper[5002]: I0930 12:37:39.673117 5002 generic.go:334] "Generic (PLEG): container finished" podID="b44591be-b0af-4144-a646-7739c7ea1e69" containerID="9d400a84a73488d328314feecae4c3308da84f67dd6cdae0ad3616a9b96c4712" exitCode=137
Sep 30 12:37:39 crc kubenswrapper[5002]: I0930 12:37:39.673147 5002 generic.go:334] "Generic (PLEG): container finished" podID="b44591be-b0af-4144-a646-7739c7ea1e69" containerID="d84b1612d7f900cbe04d8ca03dbaf098a8a8031bad4cd4bfc918107c0aba8633" exitCode=137
Sep 30 12:37:39 crc kubenswrapper[5002]: I0930 12:37:39.673165 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-56d859b67f-fr8wt" event={"ID":"b44591be-b0af-4144-a646-7739c7ea1e69","Type":"ContainerDied","Data":"9d400a84a73488d328314feecae4c3308da84f67dd6cdae0ad3616a9b96c4712"}
Sep 30 12:37:39 crc kubenswrapper[5002]: I0930 12:37:39.673184 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-56d859b67f-fr8wt" event={"ID":"b44591be-b0af-4144-a646-7739c7ea1e69","Type":"ContainerDied","Data":"d84b1612d7f900cbe04d8ca03dbaf098a8a8031bad4cd4bfc918107c0aba8633"}
Sep 30 12:37:40 crc kubenswrapper[5002]: I0930 12:37:40.658435 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-56d859b67f-fr8wt"
Sep 30 12:37:40 crc kubenswrapper[5002]: I0930 12:37:40.685070 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b44591be-b0af-4144-a646-7739c7ea1e69-logs\") pod \"b44591be-b0af-4144-a646-7739c7ea1e69\" (UID: \"b44591be-b0af-4144-a646-7739c7ea1e69\") "
Sep 30 12:37:40 crc kubenswrapper[5002]: I0930 12:37:40.685278 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/b44591be-b0af-4144-a646-7739c7ea1e69-scripts\") pod \"b44591be-b0af-4144-a646-7739c7ea1e69\" (UID: \"b44591be-b0af-4144-a646-7739c7ea1e69\") "
Sep 30 12:37:40 crc kubenswrapper[5002]: I0930 12:37:40.685880 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b44591be-b0af-4144-a646-7739c7ea1e69-logs" (OuterVolumeSpecName: "logs") pod "b44591be-b0af-4144-a646-7739c7ea1e69" (UID: "b44591be-b0af-4144-a646-7739c7ea1e69"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 30 12:37:40 crc kubenswrapper[5002]: I0930 12:37:40.686155 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jfmqz\" (UniqueName: \"kubernetes.io/projected/b44591be-b0af-4144-a646-7739c7ea1e69-kube-api-access-jfmqz\") pod \"b44591be-b0af-4144-a646-7739c7ea1e69\" (UID: \"b44591be-b0af-4144-a646-7739c7ea1e69\") "
Sep 30 12:37:40 crc kubenswrapper[5002]: I0930 12:37:40.686207 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/b44591be-b0af-4144-a646-7739c7ea1e69-config-data\") pod \"b44591be-b0af-4144-a646-7739c7ea1e69\" (UID: \"b44591be-b0af-4144-a646-7739c7ea1e69\") "
Sep 30 12:37:40 crc kubenswrapper[5002]: I0930 12:37:40.686243 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/b44591be-b0af-4144-a646-7739c7ea1e69-horizon-secret-key\") pod \"b44591be-b0af-4144-a646-7739c7ea1e69\" (UID: \"b44591be-b0af-4144-a646-7739c7ea1e69\") "
Sep 30 12:37:40 crc kubenswrapper[5002]: I0930 12:37:40.687973 5002 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b44591be-b0af-4144-a646-7739c7ea1e69-logs\") on node \"crc\" DevicePath \"\""
Sep 30 12:37:40 crc kubenswrapper[5002]: I0930 12:37:40.692048 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b44591be-b0af-4144-a646-7739c7ea1e69-kube-api-access-jfmqz" (OuterVolumeSpecName: "kube-api-access-jfmqz") pod "b44591be-b0af-4144-a646-7739c7ea1e69" (UID: "b44591be-b0af-4144-a646-7739c7ea1e69"). InnerVolumeSpecName "kube-api-access-jfmqz". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 12:37:40 crc kubenswrapper[5002]: I0930 12:37:40.718643 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b44591be-b0af-4144-a646-7739c7ea1e69-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "b44591be-b0af-4144-a646-7739c7ea1e69" (UID: "b44591be-b0af-4144-a646-7739c7ea1e69"). InnerVolumeSpecName "horizon-secret-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:37:40 crc kubenswrapper[5002]: I0930 12:37:40.722751 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-mvczr" event={"ID":"8687f1f5-15da-479c-8661-948b437fcb33","Type":"ContainerStarted","Data":"239dcf3d4f1d76af4b15a736976090ea497fc3006086fecbc2b06e233d1d7553"} Sep 30 12:37:40 crc kubenswrapper[5002]: E0930 12:37:40.722759 5002 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-cinder-api:current-podified" Sep 30 12:37:40 crc kubenswrapper[5002]: I0930 12:37:40.722879 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b44591be-b0af-4144-a646-7739c7ea1e69-scripts" (OuterVolumeSpecName: "scripts") pod "b44591be-b0af-4144-a646-7739c7ea1e69" (UID: "b44591be-b0af-4144-a646-7739c7ea1e69"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 12:37:40 crc kubenswrapper[5002]: E0930 12:37:40.722998 5002 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:cinder-db-sync,Image:quay.io/podified-antelope-centos9/openstack-cinder-api:current-podified,Command:[/bin/bash],Args:[-c /usr/local/bin/kolla_set_configs && /usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:TRUE,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:etc-machine-id,ReadOnly:true,MountPath:/etc/machine-id,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:scripts,ReadOnly:true,MountPath:/usr/local/bin/container-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/config-data/merged,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/etc/my.cnf,SubPath:my.cnf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:db-sync-config-data,ReadOnly:true,MountPath:/etc/cinder/cinder.conf.d,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:db-sync-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-fjgs2,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:nil,Privileged:nil,SELinuxOptions:nil,RunAsUser:*0,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} 
start failed in pod cinder-db-sync-vxpxc_openstack(6c398da4-8e97-4ee7-83bb-f958c41fabff): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Sep 30 12:37:40 crc kubenswrapper[5002]: E0930 12:37:40.724152 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cinder-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/cinder-db-sync-vxpxc" podUID="6c398da4-8e97-4ee7-83bb-f958c41fabff" Sep 30 12:37:40 crc kubenswrapper[5002]: I0930 12:37:40.725855 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-56d859b67f-fr8wt" event={"ID":"b44591be-b0af-4144-a646-7739c7ea1e69","Type":"ContainerDied","Data":"32461133d62736c3a38370169d40920b6041ffc54b0fd4e9d1ce4b46792d3672"} Sep 30 12:37:40 crc kubenswrapper[5002]: I0930 12:37:40.725916 5002 scope.go:117] "RemoveContainer" containerID="9d400a84a73488d328314feecae4c3308da84f67dd6cdae0ad3616a9b96c4712" Sep 30 12:37:40 crc kubenswrapper[5002]: I0930 12:37:40.726108 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-56d859b67f-fr8wt" Sep 30 12:37:40 crc kubenswrapper[5002]: I0930 12:37:40.747929 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-db-sync-mvczr" podStartSLOduration=4.713379711 podStartE2EDuration="28.747902627s" podCreationTimestamp="2025-09-30 12:37:12 +0000 UTC" firstStartedPulling="2025-09-30 12:37:16.085131049 +0000 UTC m=+1010.334813195" lastFinishedPulling="2025-09-30 12:37:40.119653955 +0000 UTC m=+1034.369336111" observedRunningTime="2025-09-30 12:37:40.743650379 +0000 UTC m=+1034.993332525" watchObservedRunningTime="2025-09-30 12:37:40.747902627 +0000 UTC m=+1034.997584803" Sep 30 12:37:40 crc kubenswrapper[5002]: I0930 12:37:40.764118 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b44591be-b0af-4144-a646-7739c7ea1e69-config-data" (OuterVolumeSpecName: "config-data") pod "b44591be-b0af-4144-a646-7739c7ea1e69" (UID: "b44591be-b0af-4144-a646-7739c7ea1e69"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 12:37:40 crc kubenswrapper[5002]: I0930 12:37:40.790608 5002 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/b44591be-b0af-4144-a646-7739c7ea1e69-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 12:37:40 crc kubenswrapper[5002]: I0930 12:37:40.790639 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jfmqz\" (UniqueName: \"kubernetes.io/projected/b44591be-b0af-4144-a646-7739c7ea1e69-kube-api-access-jfmqz\") on node \"crc\" DevicePath \"\"" Sep 30 12:37:40 crc kubenswrapper[5002]: I0930 12:37:40.790651 5002 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/b44591be-b0af-4144-a646-7739c7ea1e69-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 12:37:40 crc kubenswrapper[5002]: I0930 12:37:40.790662 5002 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/b44591be-b0af-4144-a646-7739c7ea1e69-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Sep 30 12:37:40 crc kubenswrapper[5002]: I0930 12:37:40.894010 5002 scope.go:117] "RemoveContainer" containerID="d84b1612d7f900cbe04d8ca03dbaf098a8a8031bad4cd4bfc918107c0aba8633" Sep 30 12:37:41 crc kubenswrapper[5002]: I0930 12:37:41.072048 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-56d859b67f-fr8wt"] Sep 30 12:37:41 crc kubenswrapper[5002]: I0930 12:37:41.084733 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-56d859b67f-fr8wt"] Sep 30 12:37:41 crc kubenswrapper[5002]: E0930 12:37:41.735563 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cinder-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-cinder-api:current-podified\\\"\"" pod="openstack/cinder-db-sync-vxpxc" podUID="6c398da4-8e97-4ee7-83bb-f958c41fabff" Sep 30 12:37:42 crc kubenswrapper[5002]: I0930 12:37:42.687909 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b44591be-b0af-4144-a646-7739c7ea1e69" path="/var/lib/kubelet/pods/b44591be-b0af-4144-a646-7739c7ea1e69/volumes" Sep 30 12:37:42 crc kubenswrapper[5002]: I0930 12:37:42.748534 5002 generic.go:334] "Generic (PLEG): container finished" podID="fd4d1d88-c894-496a-b2ee-00bf80fa2415" containerID="8b84e3b52cde98cea92f145d0960b70ae2e34eb1f70dee734351f11ecd31ab55" exitCode=0 Sep 30 12:37:42 crc kubenswrapper[5002]: I0930 12:37:42.748590 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-f8777b74-fpwh2" event={"ID":"fd4d1d88-c894-496a-b2ee-00bf80fa2415","Type":"ContainerDied","Data":"8b84e3b52cde98cea92f145d0960b70ae2e34eb1f70dee734351f11ecd31ab55"} Sep 30 12:37:43 crc kubenswrapper[5002]: I0930 12:37:43.096105 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Sep 30 12:37:43 crc kubenswrapper[5002]: I0930 12:37:43.131397 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/22178f02-1a64-4a88-a564-fe143875e7df-combined-ca-bundle\") pod \"22178f02-1a64-4a88-a564-fe143875e7df\" (UID: \"22178f02-1a64-4a88-a564-fe143875e7df\") " Sep 30 12:37:43 crc kubenswrapper[5002]: I0930 12:37:43.131449 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v27pl\" (UniqueName: \"kubernetes.io/projected/22178f02-1a64-4a88-a564-fe143875e7df-kube-api-access-v27pl\") pod \"22178f02-1a64-4a88-a564-fe143875e7df\" (UID: \"22178f02-1a64-4a88-a564-fe143875e7df\") " Sep 30 12:37:43 crc kubenswrapper[5002]: I0930 12:37:43.131526 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/22178f02-1a64-4a88-a564-fe143875e7df-run-httpd\") pod \"22178f02-1a64-4a88-a564-fe143875e7df\" (UID: \"22178f02-1a64-4a88-a564-fe143875e7df\") " Sep 30 12:37:43 crc kubenswrapper[5002]: I0930 12:37:43.131564 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/22178f02-1a64-4a88-a564-fe143875e7df-scripts\") pod \"22178f02-1a64-4a88-a564-fe143875e7df\" (UID: \"22178f02-1a64-4a88-a564-fe143875e7df\") " Sep 30 12:37:43 crc kubenswrapper[5002]: I0930 12:37:43.131596 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/22178f02-1a64-4a88-a564-fe143875e7df-log-httpd\") pod \"22178f02-1a64-4a88-a564-fe143875e7df\" (UID: \"22178f02-1a64-4a88-a564-fe143875e7df\") " Sep 30 12:37:43 crc kubenswrapper[5002]: I0930 12:37:43.131632 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/22178f02-1a64-4a88-a564-fe143875e7df-config-data\") pod \"22178f02-1a64-4a88-a564-fe143875e7df\" (UID: \"22178f02-1a64-4a88-a564-fe143875e7df\") " Sep 30 12:37:43 crc kubenswrapper[5002]: I0930 12:37:43.131694 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/22178f02-1a64-4a88-a564-fe143875e7df-sg-core-conf-yaml\") pod \"22178f02-1a64-4a88-a564-fe143875e7df\" (UID: \"22178f02-1a64-4a88-a564-fe143875e7df\") " Sep 30 12:37:43 crc kubenswrapper[5002]: I0930 12:37:43.132042 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/22178f02-1a64-4a88-a564-fe143875e7df-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "22178f02-1a64-4a88-a564-fe143875e7df" (UID: "22178f02-1a64-4a88-a564-fe143875e7df"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 12:37:43 crc kubenswrapper[5002]: I0930 12:37:43.132160 5002 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/22178f02-1a64-4a88-a564-fe143875e7df-run-httpd\") on node \"crc\" DevicePath \"\"" Sep 30 12:37:43 crc kubenswrapper[5002]: I0930 12:37:43.132326 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/22178f02-1a64-4a88-a564-fe143875e7df-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "22178f02-1a64-4a88-a564-fe143875e7df" (UID: "22178f02-1a64-4a88-a564-fe143875e7df"). 
InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 12:37:43 crc kubenswrapper[5002]: I0930 12:37:43.138470 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22178f02-1a64-4a88-a564-fe143875e7df-scripts" (OuterVolumeSpecName: "scripts") pod "22178f02-1a64-4a88-a564-fe143875e7df" (UID: "22178f02-1a64-4a88-a564-fe143875e7df"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:37:43 crc kubenswrapper[5002]: I0930 12:37:43.140848 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/22178f02-1a64-4a88-a564-fe143875e7df-kube-api-access-v27pl" (OuterVolumeSpecName: "kube-api-access-v27pl") pod "22178f02-1a64-4a88-a564-fe143875e7df" (UID: "22178f02-1a64-4a88-a564-fe143875e7df"). InnerVolumeSpecName "kube-api-access-v27pl". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 12:37:43 crc kubenswrapper[5002]: I0930 12:37:43.177593 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22178f02-1a64-4a88-a564-fe143875e7df-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "22178f02-1a64-4a88-a564-fe143875e7df" (UID: "22178f02-1a64-4a88-a564-fe143875e7df"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:37:43 crc kubenswrapper[5002]: I0930 12:37:43.179207 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22178f02-1a64-4a88-a564-fe143875e7df-config-data" (OuterVolumeSpecName: "config-data") pod "22178f02-1a64-4a88-a564-fe143875e7df" (UID: "22178f02-1a64-4a88-a564-fe143875e7df"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:37:43 crc kubenswrapper[5002]: I0930 12:37:43.189353 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22178f02-1a64-4a88-a564-fe143875e7df-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "22178f02-1a64-4a88-a564-fe143875e7df" (UID: "22178f02-1a64-4a88-a564-fe143875e7df"). InnerVolumeSpecName "sg-core-conf-yaml". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:37:43 crc kubenswrapper[5002]: I0930 12:37:43.234175 5002 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/22178f02-1a64-4a88-a564-fe143875e7df-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Sep 30 12:37:43 crc kubenswrapper[5002]: I0930 12:37:43.234252 5002 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/22178f02-1a64-4a88-a564-fe143875e7df-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 12:37:43 crc kubenswrapper[5002]: I0930 12:37:43.234279 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v27pl\" (UniqueName: \"kubernetes.io/projected/22178f02-1a64-4a88-a564-fe143875e7df-kube-api-access-v27pl\") on node \"crc\" DevicePath \"\"" Sep 30 12:37:43 crc kubenswrapper[5002]: I0930 12:37:43.234306 5002 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/22178f02-1a64-4a88-a564-fe143875e7df-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 12:37:43 crc kubenswrapper[5002]: I0930 12:37:43.234329 5002 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/22178f02-1a64-4a88-a564-fe143875e7df-log-httpd\") on node \"crc\" DevicePath \"\"" Sep 30 12:37:43 crc kubenswrapper[5002]: I0930 12:37:43.234350 5002 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/22178f02-1a64-4a88-a564-fe143875e7df-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 12:37:43 crc kubenswrapper[5002]: I0930 12:37:43.760593 5002 generic.go:334] "Generic (PLEG): container finished" podID="22178f02-1a64-4a88-a564-fe143875e7df" containerID="654d1adc3ddf20f5867bd8c29763dc1ed55ab3326b36f37943a8648cb889b6af" exitCode=0 Sep 30 12:37:43 crc kubenswrapper[5002]: I0930 12:37:43.760642 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"22178f02-1a64-4a88-a564-fe143875e7df","Type":"ContainerDied","Data":"654d1adc3ddf20f5867bd8c29763dc1ed55ab3326b36f37943a8648cb889b6af"} Sep 30 12:37:43 crc kubenswrapper[5002]: I0930 12:37:43.760673 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"22178f02-1a64-4a88-a564-fe143875e7df","Type":"ContainerDied","Data":"63ecdbf9b209afe0b1d25296a77baa9be2af34eb735fa2fe1ff9a460d213ab7d"} Sep 30 12:37:43 crc kubenswrapper[5002]: I0930 12:37:43.760693 5002 scope.go:117] "RemoveContainer" containerID="3a98ed24d60c32ebb7cf23132b2524ad92aff0e6c4fcc8c33abd561d0b572d78" Sep 30 12:37:43 crc kubenswrapper[5002]: I0930 12:37:43.760848 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Sep 30 12:37:43 crc kubenswrapper[5002]: I0930 12:37:43.794326 5002 scope.go:117] "RemoveContainer" containerID="654d1adc3ddf20f5867bd8c29763dc1ed55ab3326b36f37943a8648cb889b6af" Sep 30 12:37:43 crc kubenswrapper[5002]: I0930 12:37:43.819548 5002 scope.go:117] "RemoveContainer" containerID="3a98ed24d60c32ebb7cf23132b2524ad92aff0e6c4fcc8c33abd561d0b572d78" Sep 30 12:37:43 crc kubenswrapper[5002]: E0930 12:37:43.820283 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3a98ed24d60c32ebb7cf23132b2524ad92aff0e6c4fcc8c33abd561d0b572d78\": container with ID starting with 3a98ed24d60c32ebb7cf23132b2524ad92aff0e6c4fcc8c33abd561d0b572d78 not found: ID does not exist" containerID="3a98ed24d60c32ebb7cf23132b2524ad92aff0e6c4fcc8c33abd561d0b572d78" Sep 30 12:37:43 crc kubenswrapper[5002]: I0930 12:37:43.820318 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3a98ed24d60c32ebb7cf23132b2524ad92aff0e6c4fcc8c33abd561d0b572d78"} err="failed to get container status \"3a98ed24d60c32ebb7cf23132b2524ad92aff0e6c4fcc8c33abd561d0b572d78\": rpc error: code = NotFound desc = could not find container \"3a98ed24d60c32ebb7cf23132b2524ad92aff0e6c4fcc8c33abd561d0b572d78\": container with ID starting with 3a98ed24d60c32ebb7cf23132b2524ad92aff0e6c4fcc8c33abd561d0b572d78 not found: ID does not exist" Sep 30 12:37:43 crc kubenswrapper[5002]: I0930 12:37:43.820342 5002 scope.go:117] "RemoveContainer" containerID="654d1adc3ddf20f5867bd8c29763dc1ed55ab3326b36f37943a8648cb889b6af" Sep 30 12:37:43 crc kubenswrapper[5002]: E0930 12:37:43.822875 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"654d1adc3ddf20f5867bd8c29763dc1ed55ab3326b36f37943a8648cb889b6af\": container with ID starting with 654d1adc3ddf20f5867bd8c29763dc1ed55ab3326b36f37943a8648cb889b6af not found: ID does not exist" containerID="654d1adc3ddf20f5867bd8c29763dc1ed55ab3326b36f37943a8648cb889b6af" Sep 30 12:37:43 crc kubenswrapper[5002]: I0930 12:37:43.822907 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"654d1adc3ddf20f5867bd8c29763dc1ed55ab3326b36f37943a8648cb889b6af"} err="failed to get container status \"654d1adc3ddf20f5867bd8c29763dc1ed55ab3326b36f37943a8648cb889b6af\": rpc error: code = NotFound desc = could not find container \"654d1adc3ddf20f5867bd8c29763dc1ed55ab3326b36f37943a8648cb889b6af\": container with ID starting with 654d1adc3ddf20f5867bd8c29763dc1ed55ab3326b36f37943a8648cb889b6af not found: ID does not exist" Sep 30 12:37:43 crc kubenswrapper[5002]: I0930 12:37:43.825894 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Sep 30 12:37:43 crc kubenswrapper[5002]: I0930 12:37:43.833234 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Sep 30 12:37:43 crc kubenswrapper[5002]: I0930 12:37:43.854592 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Sep 30 12:37:43 crc kubenswrapper[5002]: E0930 12:37:43.855002 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b44591be-b0af-4144-a646-7739c7ea1e69" containerName="horizon" Sep 30 12:37:43 crc kubenswrapper[5002]: I0930 12:37:43.855025 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="b44591be-b0af-4144-a646-7739c7ea1e69" containerName="horizon" Sep 30 12:37:43 crc 
kubenswrapper[5002]: E0930 12:37:43.855063 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="22178f02-1a64-4a88-a564-fe143875e7df" containerName="sg-core" Sep 30 12:37:43 crc kubenswrapper[5002]: I0930 12:37:43.855073 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="22178f02-1a64-4a88-a564-fe143875e7df" containerName="sg-core" Sep 30 12:37:43 crc kubenswrapper[5002]: E0930 12:37:43.855092 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b44591be-b0af-4144-a646-7739c7ea1e69" containerName="horizon-log" Sep 30 12:37:43 crc kubenswrapper[5002]: I0930 12:37:43.855101 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="b44591be-b0af-4144-a646-7739c7ea1e69" containerName="horizon-log" Sep 30 12:37:43 crc kubenswrapper[5002]: E0930 12:37:43.855121 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="22178f02-1a64-4a88-a564-fe143875e7df" containerName="ceilometer-notification-agent" Sep 30 12:37:43 crc kubenswrapper[5002]: I0930 12:37:43.855129 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="22178f02-1a64-4a88-a564-fe143875e7df" containerName="ceilometer-notification-agent" Sep 30 12:37:43 crc kubenswrapper[5002]: I0930 12:37:43.855325 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="b44591be-b0af-4144-a646-7739c7ea1e69" containerName="horizon" Sep 30 12:37:43 crc kubenswrapper[5002]: I0930 12:37:43.855339 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="22178f02-1a64-4a88-a564-fe143875e7df" containerName="ceilometer-notification-agent" Sep 30 12:37:43 crc kubenswrapper[5002]: I0930 12:37:43.855357 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="22178f02-1a64-4a88-a564-fe143875e7df" containerName="sg-core" Sep 30 12:37:43 crc kubenswrapper[5002]: I0930 12:37:43.855375 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="b44591be-b0af-4144-a646-7739c7ea1e69" containerName="horizon-log" Sep 30 12:37:43 crc kubenswrapper[5002]: I0930 12:37:43.857719 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Sep 30 12:37:43 crc kubenswrapper[5002]: I0930 12:37:43.860320 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Sep 30 12:37:43 crc kubenswrapper[5002]: I0930 12:37:43.860670 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Sep 30 12:37:43 crc kubenswrapper[5002]: I0930 12:37:43.874286 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 30 12:37:43 crc kubenswrapper[5002]: I0930 12:37:43.880008 5002 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-f8777b74-fpwh2" podUID="fd4d1d88-c894-496a-b2ee-00bf80fa2415" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.144:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.144:8443: connect: connection refused" Sep 30 12:37:43 crc kubenswrapper[5002]: I0930 12:37:43.949231 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s8tj5\" (UniqueName: \"kubernetes.io/projected/fa77db27-d4ee-44a1-8e63-008f66b34b48-kube-api-access-s8tj5\") pod \"ceilometer-0\" (UID: \"fa77db27-d4ee-44a1-8e63-008f66b34b48\") " pod="openstack/ceilometer-0" Sep 30 12:37:43 crc kubenswrapper[5002]: I0930 12:37:43.949331 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fa77db27-d4ee-44a1-8e63-008f66b34b48-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"fa77db27-d4ee-44a1-8e63-008f66b34b48\") " pod="openstack/ceilometer-0" Sep 30 12:37:43 crc kubenswrapper[5002]: I0930 12:37:43.949402 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fa77db27-d4ee-44a1-8e63-008f66b34b48-config-data\") pod \"ceilometer-0\" (UID: \"fa77db27-d4ee-44a1-8e63-008f66b34b48\") " pod="openstack/ceilometer-0" Sep 30 12:37:43 crc kubenswrapper[5002]: I0930 12:37:43.949583 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fa77db27-d4ee-44a1-8e63-008f66b34b48-run-httpd\") pod \"ceilometer-0\" (UID: \"fa77db27-d4ee-44a1-8e63-008f66b34b48\") " pod="openstack/ceilometer-0" Sep 30 12:37:43 crc kubenswrapper[5002]: I0930 12:37:43.949711 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fa77db27-d4ee-44a1-8e63-008f66b34b48-scripts\") pod \"ceilometer-0\" (UID: \"fa77db27-d4ee-44a1-8e63-008f66b34b48\") " pod="openstack/ceilometer-0" Sep 30 12:37:43 crc kubenswrapper[5002]: I0930 12:37:43.949773 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fa77db27-d4ee-44a1-8e63-008f66b34b48-log-httpd\") pod \"ceilometer-0\" (UID: \"fa77db27-d4ee-44a1-8e63-008f66b34b48\") " pod="openstack/ceilometer-0" Sep 30 12:37:43 crc kubenswrapper[5002]: I0930 12:37:43.949811 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/fa77db27-d4ee-44a1-8e63-008f66b34b48-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"fa77db27-d4ee-44a1-8e63-008f66b34b48\") " pod="openstack/ceilometer-0" Sep 30 12:37:44 crc 
kubenswrapper[5002]: I0930 12:37:44.051168 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s8tj5\" (UniqueName: \"kubernetes.io/projected/fa77db27-d4ee-44a1-8e63-008f66b34b48-kube-api-access-s8tj5\") pod \"ceilometer-0\" (UID: \"fa77db27-d4ee-44a1-8e63-008f66b34b48\") " pod="openstack/ceilometer-0" Sep 30 12:37:44 crc kubenswrapper[5002]: I0930 12:37:44.051247 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fa77db27-d4ee-44a1-8e63-008f66b34b48-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"fa77db27-d4ee-44a1-8e63-008f66b34b48\") " pod="openstack/ceilometer-0" Sep 30 12:37:44 crc kubenswrapper[5002]: I0930 12:37:44.051300 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fa77db27-d4ee-44a1-8e63-008f66b34b48-config-data\") pod \"ceilometer-0\" (UID: \"fa77db27-d4ee-44a1-8e63-008f66b34b48\") " pod="openstack/ceilometer-0" Sep 30 12:37:44 crc kubenswrapper[5002]: I0930 12:37:44.051661 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fa77db27-d4ee-44a1-8e63-008f66b34b48-run-httpd\") pod \"ceilometer-0\" (UID: \"fa77db27-d4ee-44a1-8e63-008f66b34b48\") " pod="openstack/ceilometer-0" Sep 30 12:37:44 crc kubenswrapper[5002]: I0930 12:37:44.051899 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fa77db27-d4ee-44a1-8e63-008f66b34b48-scripts\") pod \"ceilometer-0\" (UID: \"fa77db27-d4ee-44a1-8e63-008f66b34b48\") " pod="openstack/ceilometer-0" Sep 30 12:37:44 crc kubenswrapper[5002]: I0930 12:37:44.051956 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fa77db27-d4ee-44a1-8e63-008f66b34b48-log-httpd\") pod \"ceilometer-0\" (UID: \"fa77db27-d4ee-44a1-8e63-008f66b34b48\") " pod="openstack/ceilometer-0" Sep 30 12:37:44 crc kubenswrapper[5002]: I0930 12:37:44.051983 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/fa77db27-d4ee-44a1-8e63-008f66b34b48-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"fa77db27-d4ee-44a1-8e63-008f66b34b48\") " pod="openstack/ceilometer-0" Sep 30 12:37:44 crc kubenswrapper[5002]: I0930 12:37:44.052717 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fa77db27-d4ee-44a1-8e63-008f66b34b48-run-httpd\") pod \"ceilometer-0\" (UID: \"fa77db27-d4ee-44a1-8e63-008f66b34b48\") " pod="openstack/ceilometer-0" Sep 30 12:37:44 crc kubenswrapper[5002]: I0930 12:37:44.052946 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fa77db27-d4ee-44a1-8e63-008f66b34b48-log-httpd\") pod \"ceilometer-0\" (UID: \"fa77db27-d4ee-44a1-8e63-008f66b34b48\") " pod="openstack/ceilometer-0" Sep 30 12:37:44 crc kubenswrapper[5002]: I0930 12:37:44.056784 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/fa77db27-d4ee-44a1-8e63-008f66b34b48-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"fa77db27-d4ee-44a1-8e63-008f66b34b48\") " pod="openstack/ceilometer-0" Sep 30 12:37:44 crc kubenswrapper[5002]: I0930 12:37:44.057314 5002 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fa77db27-d4ee-44a1-8e63-008f66b34b48-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"fa77db27-d4ee-44a1-8e63-008f66b34b48\") " pod="openstack/ceilometer-0" Sep 30 12:37:44 crc kubenswrapper[5002]: I0930 12:37:44.057914 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fa77db27-d4ee-44a1-8e63-008f66b34b48-scripts\") pod \"ceilometer-0\" (UID: \"fa77db27-d4ee-44a1-8e63-008f66b34b48\") " pod="openstack/ceilometer-0" Sep 30 12:37:44 crc kubenswrapper[5002]: I0930 12:37:44.068279 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fa77db27-d4ee-44a1-8e63-008f66b34b48-config-data\") pod \"ceilometer-0\" (UID: \"fa77db27-d4ee-44a1-8e63-008f66b34b48\") " pod="openstack/ceilometer-0" Sep 30 12:37:44 crc kubenswrapper[5002]: I0930 12:37:44.070177 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s8tj5\" (UniqueName: \"kubernetes.io/projected/fa77db27-d4ee-44a1-8e63-008f66b34b48-kube-api-access-s8tj5\") pod \"ceilometer-0\" (UID: \"fa77db27-d4ee-44a1-8e63-008f66b34b48\") " pod="openstack/ceilometer-0" Sep 30 12:37:44 crc kubenswrapper[5002]: I0930 12:37:44.183786 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 30 12:37:44 crc kubenswrapper[5002]: I0930 12:37:44.674813 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 30 12:37:44 crc kubenswrapper[5002]: W0930 12:37:44.679636 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podfa77db27_d4ee_44a1_8e63_008f66b34b48.slice/crio-be5083d9672a96867d239e51a660b1020853d3dccf673128f2f84315ed36f414 WatchSource:0}: Error finding container be5083d9672a96867d239e51a660b1020853d3dccf673128f2f84315ed36f414: Status 404 returned error can't find the container with id be5083d9672a96867d239e51a660b1020853d3dccf673128f2f84315ed36f414 Sep 30 12:37:44 crc kubenswrapper[5002]: I0930 12:37:44.688235 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="22178f02-1a64-4a88-a564-fe143875e7df" path="/var/lib/kubelet/pods/22178f02-1a64-4a88-a564-fe143875e7df/volumes" Sep 30 12:37:44 crc kubenswrapper[5002]: I0930 12:37:44.773977 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"fa77db27-d4ee-44a1-8e63-008f66b34b48","Type":"ContainerStarted","Data":"be5083d9672a96867d239e51a660b1020853d3dccf673128f2f84315ed36f414"} Sep 30 12:37:45 crc kubenswrapper[5002]: I0930 12:37:45.736616 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/keystone-84489c98f8-p85zv" Sep 30 12:37:46 crc kubenswrapper[5002]: I0930 12:37:46.819275 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"fa77db27-d4ee-44a1-8e63-008f66b34b48","Type":"ContainerStarted","Data":"04755585fe16d612a1b3517199059a4a28944e1c3c9252e203697dc993ad8e7b"} Sep 30 12:37:46 crc kubenswrapper[5002]: I0930 12:37:46.819698 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"fa77db27-d4ee-44a1-8e63-008f66b34b48","Type":"ContainerStarted","Data":"55b233246ee53d4d5a3b8329279bf1dbc9efff7875abe28b2346f065546eb3e4"} Sep 30 12:37:47 crc kubenswrapper[5002]: I0930 
12:37:47.829038 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"fa77db27-d4ee-44a1-8e63-008f66b34b48","Type":"ContainerStarted","Data":"f9379ff97f831462614e289dd46f9b8c895d82de5eac97e80ea92c221b895960"} Sep 30 12:37:47 crc kubenswrapper[5002]: I0930 12:37:47.831164 5002 generic.go:334] "Generic (PLEG): container finished" podID="8687f1f5-15da-479c-8661-948b437fcb33" containerID="239dcf3d4f1d76af4b15a736976090ea497fc3006086fecbc2b06e233d1d7553" exitCode=0 Sep 30 12:37:47 crc kubenswrapper[5002]: I0930 12:37:47.831200 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-mvczr" event={"ID":"8687f1f5-15da-479c-8661-948b437fcb33","Type":"ContainerDied","Data":"239dcf3d4f1d76af4b15a736976090ea497fc3006086fecbc2b06e233d1d7553"} Sep 30 12:37:49 crc kubenswrapper[5002]: I0930 12:37:49.225407 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-mvczr" Sep 30 12:37:49 crc kubenswrapper[5002]: I0930 12:37:49.266072 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/8687f1f5-15da-479c-8661-948b437fcb33-db-sync-config-data\") pod \"8687f1f5-15da-479c-8661-948b437fcb33\" (UID: \"8687f1f5-15da-479c-8661-948b437fcb33\") " Sep 30 12:37:49 crc kubenswrapper[5002]: I0930 12:37:49.266137 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8687f1f5-15da-479c-8661-948b437fcb33-combined-ca-bundle\") pod \"8687f1f5-15da-479c-8661-948b437fcb33\" (UID: \"8687f1f5-15da-479c-8661-948b437fcb33\") " Sep 30 12:37:49 crc kubenswrapper[5002]: I0930 12:37:49.266326 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-llnzh\" (UniqueName: \"kubernetes.io/projected/8687f1f5-15da-479c-8661-948b437fcb33-kube-api-access-llnzh\") pod \"8687f1f5-15da-479c-8661-948b437fcb33\" (UID: \"8687f1f5-15da-479c-8661-948b437fcb33\") " Sep 30 12:37:49 crc kubenswrapper[5002]: I0930 12:37:49.271536 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8687f1f5-15da-479c-8661-948b437fcb33-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "8687f1f5-15da-479c-8661-948b437fcb33" (UID: "8687f1f5-15da-479c-8661-948b437fcb33"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:37:49 crc kubenswrapper[5002]: I0930 12:37:49.271580 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8687f1f5-15da-479c-8661-948b437fcb33-kube-api-access-llnzh" (OuterVolumeSpecName: "kube-api-access-llnzh") pod "8687f1f5-15da-479c-8661-948b437fcb33" (UID: "8687f1f5-15da-479c-8661-948b437fcb33"). InnerVolumeSpecName "kube-api-access-llnzh". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 12:37:49 crc kubenswrapper[5002]: I0930 12:37:49.294122 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8687f1f5-15da-479c-8661-948b437fcb33-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "8687f1f5-15da-479c-8661-948b437fcb33" (UID: "8687f1f5-15da-479c-8661-948b437fcb33"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:37:49 crc kubenswrapper[5002]: I0930 12:37:49.368440 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-llnzh\" (UniqueName: \"kubernetes.io/projected/8687f1f5-15da-479c-8661-948b437fcb33-kube-api-access-llnzh\") on node \"crc\" DevicePath \"\"" Sep 30 12:37:49 crc kubenswrapper[5002]: I0930 12:37:49.368531 5002 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/8687f1f5-15da-479c-8661-948b437fcb33-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 12:37:49 crc kubenswrapper[5002]: I0930 12:37:49.368541 5002 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8687f1f5-15da-479c-8661-948b437fcb33-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 12:37:49 crc kubenswrapper[5002]: I0930 12:37:49.779497 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstackclient"] Sep 30 12:37:49 crc kubenswrapper[5002]: E0930 12:37:49.780216 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8687f1f5-15da-479c-8661-948b437fcb33" containerName="barbican-db-sync" Sep 30 12:37:49 crc kubenswrapper[5002]: I0930 12:37:49.780239 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="8687f1f5-15da-479c-8661-948b437fcb33" containerName="barbican-db-sync" Sep 30 12:37:49 crc kubenswrapper[5002]: I0930 12:37:49.780519 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="8687f1f5-15da-479c-8661-948b437fcb33" containerName="barbican-db-sync" Sep 30 12:37:49 crc kubenswrapper[5002]: I0930 12:37:49.781248 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Sep 30 12:37:49 crc kubenswrapper[5002]: I0930 12:37:49.783719 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-config-secret" Sep 30 12:37:49 crc kubenswrapper[5002]: I0930 12:37:49.794462 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config" Sep 30 12:37:49 crc kubenswrapper[5002]: I0930 12:37:49.795862 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstackclient-openstackclient-dockercfg-b7ttk" Sep 30 12:37:49 crc kubenswrapper[5002]: I0930 12:37:49.797231 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Sep 30 12:37:49 crc kubenswrapper[5002]: I0930 12:37:49.851608 5002 generic.go:334] "Generic (PLEG): container finished" podID="d52650cc-2190-4b6f-8a3f-f506262075d8" containerID="8a95094e63785df2c49d0117ed1d9bc8bff30ac583aa71825afe7b2e3d5ed8b6" exitCode=0 Sep 30 12:37:49 crc kubenswrapper[5002]: I0930 12:37:49.851696 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-qwvj9" event={"ID":"d52650cc-2190-4b6f-8a3f-f506262075d8","Type":"ContainerDied","Data":"8a95094e63785df2c49d0117ed1d9bc8bff30ac583aa71825afe7b2e3d5ed8b6"} Sep 30 12:37:49 crc kubenswrapper[5002]: I0930 12:37:49.854137 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-mvczr" event={"ID":"8687f1f5-15da-479c-8661-948b437fcb33","Type":"ContainerDied","Data":"cb6d5712c0eaf26bdee35205fb05d670bdbae107d7425cf1f8998c2c05aa8af8"} Sep 30 12:37:49 crc kubenswrapper[5002]: I0930 12:37:49.854173 5002 pod_container_deletor.go:80] "Container not found in pod's containers" 
containerID="cb6d5712c0eaf26bdee35205fb05d670bdbae107d7425cf1f8998c2c05aa8af8" Sep 30 12:37:49 crc kubenswrapper[5002]: I0930 12:37:49.854230 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-mvczr" Sep 30 12:37:49 crc kubenswrapper[5002]: I0930 12:37:49.859829 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"fa77db27-d4ee-44a1-8e63-008f66b34b48","Type":"ContainerStarted","Data":"c36949a6b43b6c3f905e7f9919a3b739c026f04aa774295ff284f8de300b226b"} Sep 30 12:37:49 crc kubenswrapper[5002]: I0930 12:37:49.860303 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Sep 30 12:37:49 crc kubenswrapper[5002]: I0930 12:37:49.877312 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5g4z2\" (UniqueName: \"kubernetes.io/projected/7f7eb2da-349a-49e9-9e42-7e61b36682f0-kube-api-access-5g4z2\") pod \"openstackclient\" (UID: \"7f7eb2da-349a-49e9-9e42-7e61b36682f0\") " pod="openstack/openstackclient" Sep 30 12:37:49 crc kubenswrapper[5002]: I0930 12:37:49.877415 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/7f7eb2da-349a-49e9-9e42-7e61b36682f0-openstack-config\") pod \"openstackclient\" (UID: \"7f7eb2da-349a-49e9-9e42-7e61b36682f0\") " pod="openstack/openstackclient" Sep 30 12:37:49 crc kubenswrapper[5002]: I0930 12:37:49.877450 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/7f7eb2da-349a-49e9-9e42-7e61b36682f0-openstack-config-secret\") pod \"openstackclient\" (UID: \"7f7eb2da-349a-49e9-9e42-7e61b36682f0\") " pod="openstack/openstackclient" Sep 30 12:37:49 crc kubenswrapper[5002]: I0930 12:37:49.877579 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7f7eb2da-349a-49e9-9e42-7e61b36682f0-combined-ca-bundle\") pod \"openstackclient\" (UID: \"7f7eb2da-349a-49e9-9e42-7e61b36682f0\") " pod="openstack/openstackclient" Sep 30 12:37:49 crc kubenswrapper[5002]: I0930 12:37:49.898824 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.944373147 podStartE2EDuration="6.898807081s" podCreationTimestamp="2025-09-30 12:37:43 +0000 UTC" firstStartedPulling="2025-09-30 12:37:44.68307142 +0000 UTC m=+1038.932753566" lastFinishedPulling="2025-09-30 12:37:48.637505344 +0000 UTC m=+1042.887187500" observedRunningTime="2025-09-30 12:37:49.897432286 +0000 UTC m=+1044.147114472" watchObservedRunningTime="2025-09-30 12:37:49.898807081 +0000 UTC m=+1044.148489227" Sep 30 12:37:49 crc kubenswrapper[5002]: I0930 12:37:49.979047 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/7f7eb2da-349a-49e9-9e42-7e61b36682f0-openstack-config\") pod \"openstackclient\" (UID: \"7f7eb2da-349a-49e9-9e42-7e61b36682f0\") " pod="openstack/openstackclient" Sep 30 12:37:49 crc kubenswrapper[5002]: I0930 12:37:49.979101 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/7f7eb2da-349a-49e9-9e42-7e61b36682f0-openstack-config-secret\") pod 
\"openstackclient\" (UID: \"7f7eb2da-349a-49e9-9e42-7e61b36682f0\") " pod="openstack/openstackclient" Sep 30 12:37:49 crc kubenswrapper[5002]: I0930 12:37:49.979220 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7f7eb2da-349a-49e9-9e42-7e61b36682f0-combined-ca-bundle\") pod \"openstackclient\" (UID: \"7f7eb2da-349a-49e9-9e42-7e61b36682f0\") " pod="openstack/openstackclient" Sep 30 12:37:49 crc kubenswrapper[5002]: I0930 12:37:49.979268 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5g4z2\" (UniqueName: \"kubernetes.io/projected/7f7eb2da-349a-49e9-9e42-7e61b36682f0-kube-api-access-5g4z2\") pod \"openstackclient\" (UID: \"7f7eb2da-349a-49e9-9e42-7e61b36682f0\") " pod="openstack/openstackclient" Sep 30 12:37:49 crc kubenswrapper[5002]: I0930 12:37:49.980436 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/7f7eb2da-349a-49e9-9e42-7e61b36682f0-openstack-config\") pod \"openstackclient\" (UID: \"7f7eb2da-349a-49e9-9e42-7e61b36682f0\") " pod="openstack/openstackclient" Sep 30 12:37:49 crc kubenswrapper[5002]: I0930 12:37:49.983452 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7f7eb2da-349a-49e9-9e42-7e61b36682f0-combined-ca-bundle\") pod \"openstackclient\" (UID: \"7f7eb2da-349a-49e9-9e42-7e61b36682f0\") " pod="openstack/openstackclient" Sep 30 12:37:49 crc kubenswrapper[5002]: I0930 12:37:49.984335 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/7f7eb2da-349a-49e9-9e42-7e61b36682f0-openstack-config-secret\") pod \"openstackclient\" (UID: \"7f7eb2da-349a-49e9-9e42-7e61b36682f0\") " pod="openstack/openstackclient" Sep 30 12:37:50 crc kubenswrapper[5002]: I0930 12:37:50.011811 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5g4z2\" (UniqueName: \"kubernetes.io/projected/7f7eb2da-349a-49e9-9e42-7e61b36682f0-kube-api-access-5g4z2\") pod \"openstackclient\" (UID: \"7f7eb2da-349a-49e9-9e42-7e61b36682f0\") " pod="openstack/openstackclient" Sep 30 12:37:50 crc kubenswrapper[5002]: I0930 12:37:50.090572 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/openstackclient"] Sep 30 12:37:50 crc kubenswrapper[5002]: I0930 12:37:50.093073 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Sep 30 12:37:50 crc kubenswrapper[5002]: I0930 12:37:50.158604 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/openstackclient"] Sep 30 12:37:50 crc kubenswrapper[5002]: I0930 12:37:50.195705 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-worker-5976c988bc-j7gh5"] Sep 30 12:37:50 crc kubenswrapper[5002]: I0930 12:37:50.197173 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-worker-5976c988bc-j7gh5" Sep 30 12:37:50 crc kubenswrapper[5002]: I0930 12:37:50.221059 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-worker-config-data" Sep 30 12:37:50 crc kubenswrapper[5002]: I0930 12:37:50.221282 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-dzvks" Sep 30 12:37:50 crc kubenswrapper[5002]: I0930 12:37:50.221451 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data" Sep 30 12:37:50 crc kubenswrapper[5002]: I0930 12:37:50.230891 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstackclient"] Sep 30 12:37:50 crc kubenswrapper[5002]: I0930 12:37:50.232257 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Sep 30 12:37:50 crc kubenswrapper[5002]: I0930 12:37:50.232418 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-keystone-listener-64bf5676fd-f8zrr"] Sep 30 12:37:50 crc kubenswrapper[5002]: I0930 12:37:50.233750 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-keystone-listener-64bf5676fd-f8zrr" Sep 30 12:37:50 crc kubenswrapper[5002]: I0930 12:37:50.265838 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-keystone-listener-config-data" Sep 30 12:37:50 crc kubenswrapper[5002]: I0930 12:37:50.274553 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-5976c988bc-j7gh5"] Sep 30 12:37:50 crc kubenswrapper[5002]: I0930 12:37:50.291122 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/369a872a-8dd2-409e-9938-2a01cd707dc8-combined-ca-bundle\") pod \"openstackclient\" (UID: \"369a872a-8dd2-409e-9938-2a01cd707dc8\") " pod="openstack/openstackclient" Sep 30 12:37:50 crc kubenswrapper[5002]: I0930 12:37:50.291188 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/295327d8-7973-4826-a8ab-34dcf2f4b5d5-logs\") pod \"barbican-worker-5976c988bc-j7gh5\" (UID: \"295327d8-7973-4826-a8ab-34dcf2f4b5d5\") " pod="openstack/barbican-worker-5976c988bc-j7gh5" Sep 30 12:37:50 crc kubenswrapper[5002]: I0930 12:37:50.291214 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fc432228-1ae9-4a76-a81f-a8a7d2d44492-logs\") pod \"barbican-keystone-listener-64bf5676fd-f8zrr\" (UID: \"fc432228-1ae9-4a76-a81f-a8a7d2d44492\") " pod="openstack/barbican-keystone-listener-64bf5676fd-f8zrr" Sep 30 12:37:50 crc kubenswrapper[5002]: I0930 12:37:50.291252 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fc432228-1ae9-4a76-a81f-a8a7d2d44492-config-data\") pod \"barbican-keystone-listener-64bf5676fd-f8zrr\" (UID: \"fc432228-1ae9-4a76-a81f-a8a7d2d44492\") " pod="openstack/barbican-keystone-listener-64bf5676fd-f8zrr" Sep 30 12:37:50 crc kubenswrapper[5002]: I0930 12:37:50.291279 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6z7nb\" (UniqueName: \"kubernetes.io/projected/295327d8-7973-4826-a8ab-34dcf2f4b5d5-kube-api-access-6z7nb\") 
pod \"barbican-worker-5976c988bc-j7gh5\" (UID: \"295327d8-7973-4826-a8ab-34dcf2f4b5d5\") " pod="openstack/barbican-worker-5976c988bc-j7gh5" Sep 30 12:37:50 crc kubenswrapper[5002]: I0930 12:37:50.291300 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-thmfc\" (UniqueName: \"kubernetes.io/projected/fc432228-1ae9-4a76-a81f-a8a7d2d44492-kube-api-access-thmfc\") pod \"barbican-keystone-listener-64bf5676fd-f8zrr\" (UID: \"fc432228-1ae9-4a76-a81f-a8a7d2d44492\") " pod="openstack/barbican-keystone-listener-64bf5676fd-f8zrr" Sep 30 12:37:50 crc kubenswrapper[5002]: I0930 12:37:50.291330 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/295327d8-7973-4826-a8ab-34dcf2f4b5d5-combined-ca-bundle\") pod \"barbican-worker-5976c988bc-j7gh5\" (UID: \"295327d8-7973-4826-a8ab-34dcf2f4b5d5\") " pod="openstack/barbican-worker-5976c988bc-j7gh5" Sep 30 12:37:50 crc kubenswrapper[5002]: I0930 12:37:50.291353 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/295327d8-7973-4826-a8ab-34dcf2f4b5d5-config-data\") pod \"barbican-worker-5976c988bc-j7gh5\" (UID: \"295327d8-7973-4826-a8ab-34dcf2f4b5d5\") " pod="openstack/barbican-worker-5976c988bc-j7gh5" Sep 30 12:37:50 crc kubenswrapper[5002]: I0930 12:37:50.291389 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r6m2l\" (UniqueName: \"kubernetes.io/projected/369a872a-8dd2-409e-9938-2a01cd707dc8-kube-api-access-r6m2l\") pod \"openstackclient\" (UID: \"369a872a-8dd2-409e-9938-2a01cd707dc8\") " pod="openstack/openstackclient" Sep 30 12:37:50 crc kubenswrapper[5002]: I0930 12:37:50.291427 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/295327d8-7973-4826-a8ab-34dcf2f4b5d5-config-data-custom\") pod \"barbican-worker-5976c988bc-j7gh5\" (UID: \"295327d8-7973-4826-a8ab-34dcf2f4b5d5\") " pod="openstack/barbican-worker-5976c988bc-j7gh5" Sep 30 12:37:50 crc kubenswrapper[5002]: I0930 12:37:50.291485 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/369a872a-8dd2-409e-9938-2a01cd707dc8-openstack-config\") pod \"openstackclient\" (UID: \"369a872a-8dd2-409e-9938-2a01cd707dc8\") " pod="openstack/openstackclient" Sep 30 12:37:50 crc kubenswrapper[5002]: I0930 12:37:50.291525 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/369a872a-8dd2-409e-9938-2a01cd707dc8-openstack-config-secret\") pod \"openstackclient\" (UID: \"369a872a-8dd2-409e-9938-2a01cd707dc8\") " pod="openstack/openstackclient" Sep 30 12:37:50 crc kubenswrapper[5002]: I0930 12:37:50.291556 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/fc432228-1ae9-4a76-a81f-a8a7d2d44492-config-data-custom\") pod \"barbican-keystone-listener-64bf5676fd-f8zrr\" (UID: \"fc432228-1ae9-4a76-a81f-a8a7d2d44492\") " pod="openstack/barbican-keystone-listener-64bf5676fd-f8zrr" Sep 30 12:37:50 crc kubenswrapper[5002]: I0930 12:37:50.291581 5002 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fc432228-1ae9-4a76-a81f-a8a7d2d44492-combined-ca-bundle\") pod \"barbican-keystone-listener-64bf5676fd-f8zrr\" (UID: \"fc432228-1ae9-4a76-a81f-a8a7d2d44492\") " pod="openstack/barbican-keystone-listener-64bf5676fd-f8zrr" Sep 30 12:37:50 crc kubenswrapper[5002]: I0930 12:37:50.327702 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Sep 30 12:37:50 crc kubenswrapper[5002]: E0930 12:37:50.333646 5002 log.go:32] "RunPodSandbox from runtime service failed" err=< Sep 30 12:37:50 crc kubenswrapper[5002]: rpc error: code = Unknown desc = failed to create pod network sandbox k8s_openstackclient_openstack_7f7eb2da-349a-49e9-9e42-7e61b36682f0_0(80b4fe1c76f9582a312470ad941cec03d0a1f14fddea28f15048e2e4fc6123c6): error adding pod openstack_openstackclient to CNI network "multus-cni-network": plugin type="multus-shim" name="multus-cni-network" failed (add): CmdAdd (shim): CNI request failed with status 400: 'ContainerID:"80b4fe1c76f9582a312470ad941cec03d0a1f14fddea28f15048e2e4fc6123c6" Netns:"/var/run/netns/dfafec14-4988-484b-81a6-fed1a564280e" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openstack;K8S_POD_NAME=openstackclient;K8S_POD_INFRA_CONTAINER_ID=80b4fe1c76f9582a312470ad941cec03d0a1f14fddea28f15048e2e4fc6123c6;K8S_POD_UID=7f7eb2da-349a-49e9-9e42-7e61b36682f0" Path:"" ERRORED: error configuring pod [openstack/openstackclient] networking: Multus: [openstack/openstackclient/7f7eb2da-349a-49e9-9e42-7e61b36682f0]: expected pod UID "7f7eb2da-349a-49e9-9e42-7e61b36682f0" but got "369a872a-8dd2-409e-9938-2a01cd707dc8" from Kube API Sep 30 12:37:50 crc kubenswrapper[5002]: ': StdinData: {"binDir":"/var/lib/cni/bin","clusterNetwork":"/host/run/multus/cni/net.d/10-ovn-kubernetes.conf","cniVersion":"0.3.1","daemonSocketDir":"/run/multus/socket","globalNamespaces":"default,openshift-multus,openshift-sriov-network-operator,openshift-cnv","logLevel":"verbose","logToStderr":true,"name":"multus-cni-network","namespaceIsolation":true,"type":"multus-shim"} Sep 30 12:37:50 crc kubenswrapper[5002]: > Sep 30 12:37:50 crc kubenswrapper[5002]: E0930 12:37:50.333719 5002 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err=< Sep 30 12:37:50 crc kubenswrapper[5002]: rpc error: code = Unknown desc = failed to create pod network sandbox k8s_openstackclient_openstack_7f7eb2da-349a-49e9-9e42-7e61b36682f0_0(80b4fe1c76f9582a312470ad941cec03d0a1f14fddea28f15048e2e4fc6123c6): error adding pod openstack_openstackclient to CNI network "multus-cni-network": plugin type="multus-shim" name="multus-cni-network" failed (add): CmdAdd (shim): CNI request failed with status 400: 'ContainerID:"80b4fe1c76f9582a312470ad941cec03d0a1f14fddea28f15048e2e4fc6123c6" Netns:"/var/run/netns/dfafec14-4988-484b-81a6-fed1a564280e" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openstack;K8S_POD_NAME=openstackclient;K8S_POD_INFRA_CONTAINER_ID=80b4fe1c76f9582a312470ad941cec03d0a1f14fddea28f15048e2e4fc6123c6;K8S_POD_UID=7f7eb2da-349a-49e9-9e42-7e61b36682f0" Path:"" ERRORED: error configuring pod [openstack/openstackclient] networking: Multus: [openstack/openstackclient/7f7eb2da-349a-49e9-9e42-7e61b36682f0]: expected pod UID "7f7eb2da-349a-49e9-9e42-7e61b36682f0" but got "369a872a-8dd2-409e-9938-2a01cd707dc8" from Kube API Sep 30 12:37:50 crc kubenswrapper[5002]: ': StdinData: 
{"binDir":"/var/lib/cni/bin","clusterNetwork":"/host/run/multus/cni/net.d/10-ovn-kubernetes.conf","cniVersion":"0.3.1","daemonSocketDir":"/run/multus/socket","globalNamespaces":"default,openshift-multus,openshift-sriov-network-operator,openshift-cnv","logLevel":"verbose","logToStderr":true,"name":"multus-cni-network","namespaceIsolation":true,"type":"multus-shim"} Sep 30 12:37:50 crc kubenswrapper[5002]: > pod="openstack/openstackclient" Sep 30 12:37:50 crc kubenswrapper[5002]: I0930 12:37:50.376757 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-64bf5676fd-f8zrr"] Sep 30 12:37:50 crc kubenswrapper[5002]: I0930 12:37:50.393325 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/295327d8-7973-4826-a8ab-34dcf2f4b5d5-logs\") pod \"barbican-worker-5976c988bc-j7gh5\" (UID: \"295327d8-7973-4826-a8ab-34dcf2f4b5d5\") " pod="openstack/barbican-worker-5976c988bc-j7gh5" Sep 30 12:37:50 crc kubenswrapper[5002]: I0930 12:37:50.393372 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fc432228-1ae9-4a76-a81f-a8a7d2d44492-logs\") pod \"barbican-keystone-listener-64bf5676fd-f8zrr\" (UID: \"fc432228-1ae9-4a76-a81f-a8a7d2d44492\") " pod="openstack/barbican-keystone-listener-64bf5676fd-f8zrr" Sep 30 12:37:50 crc kubenswrapper[5002]: I0930 12:37:50.393411 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fc432228-1ae9-4a76-a81f-a8a7d2d44492-config-data\") pod \"barbican-keystone-listener-64bf5676fd-f8zrr\" (UID: \"fc432228-1ae9-4a76-a81f-a8a7d2d44492\") " pod="openstack/barbican-keystone-listener-64bf5676fd-f8zrr" Sep 30 12:37:50 crc kubenswrapper[5002]: I0930 12:37:50.393429 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6z7nb\" (UniqueName: \"kubernetes.io/projected/295327d8-7973-4826-a8ab-34dcf2f4b5d5-kube-api-access-6z7nb\") pod \"barbican-worker-5976c988bc-j7gh5\" (UID: \"295327d8-7973-4826-a8ab-34dcf2f4b5d5\") " pod="openstack/barbican-worker-5976c988bc-j7gh5" Sep 30 12:37:50 crc kubenswrapper[5002]: I0930 12:37:50.393448 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-thmfc\" (UniqueName: \"kubernetes.io/projected/fc432228-1ae9-4a76-a81f-a8a7d2d44492-kube-api-access-thmfc\") pod \"barbican-keystone-listener-64bf5676fd-f8zrr\" (UID: \"fc432228-1ae9-4a76-a81f-a8a7d2d44492\") " pod="openstack/barbican-keystone-listener-64bf5676fd-f8zrr" Sep 30 12:37:50 crc kubenswrapper[5002]: I0930 12:37:50.393487 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/295327d8-7973-4826-a8ab-34dcf2f4b5d5-combined-ca-bundle\") pod \"barbican-worker-5976c988bc-j7gh5\" (UID: \"295327d8-7973-4826-a8ab-34dcf2f4b5d5\") " pod="openstack/barbican-worker-5976c988bc-j7gh5" Sep 30 12:37:50 crc kubenswrapper[5002]: I0930 12:37:50.393505 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/295327d8-7973-4826-a8ab-34dcf2f4b5d5-config-data\") pod \"barbican-worker-5976c988bc-j7gh5\" (UID: \"295327d8-7973-4826-a8ab-34dcf2f4b5d5\") " pod="openstack/barbican-worker-5976c988bc-j7gh5" Sep 30 12:37:50 crc kubenswrapper[5002]: I0930 12:37:50.393535 5002 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-r6m2l\" (UniqueName: \"kubernetes.io/projected/369a872a-8dd2-409e-9938-2a01cd707dc8-kube-api-access-r6m2l\") pod \"openstackclient\" (UID: \"369a872a-8dd2-409e-9938-2a01cd707dc8\") " pod="openstack/openstackclient" Sep 30 12:37:50 crc kubenswrapper[5002]: I0930 12:37:50.393560 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/295327d8-7973-4826-a8ab-34dcf2f4b5d5-config-data-custom\") pod \"barbican-worker-5976c988bc-j7gh5\" (UID: \"295327d8-7973-4826-a8ab-34dcf2f4b5d5\") " pod="openstack/barbican-worker-5976c988bc-j7gh5" Sep 30 12:37:50 crc kubenswrapper[5002]: I0930 12:37:50.393587 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/369a872a-8dd2-409e-9938-2a01cd707dc8-openstack-config\") pod \"openstackclient\" (UID: \"369a872a-8dd2-409e-9938-2a01cd707dc8\") " pod="openstack/openstackclient" Sep 30 12:37:50 crc kubenswrapper[5002]: I0930 12:37:50.393614 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/369a872a-8dd2-409e-9938-2a01cd707dc8-openstack-config-secret\") pod \"openstackclient\" (UID: \"369a872a-8dd2-409e-9938-2a01cd707dc8\") " pod="openstack/openstackclient" Sep 30 12:37:50 crc kubenswrapper[5002]: I0930 12:37:50.393640 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/fc432228-1ae9-4a76-a81f-a8a7d2d44492-config-data-custom\") pod \"barbican-keystone-listener-64bf5676fd-f8zrr\" (UID: \"fc432228-1ae9-4a76-a81f-a8a7d2d44492\") " pod="openstack/barbican-keystone-listener-64bf5676fd-f8zrr" Sep 30 12:37:50 crc kubenswrapper[5002]: I0930 12:37:50.393662 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fc432228-1ae9-4a76-a81f-a8a7d2d44492-combined-ca-bundle\") pod \"barbican-keystone-listener-64bf5676fd-f8zrr\" (UID: \"fc432228-1ae9-4a76-a81f-a8a7d2d44492\") " pod="openstack/barbican-keystone-listener-64bf5676fd-f8zrr" Sep 30 12:37:50 crc kubenswrapper[5002]: I0930 12:37:50.393699 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/369a872a-8dd2-409e-9938-2a01cd707dc8-combined-ca-bundle\") pod \"openstackclient\" (UID: \"369a872a-8dd2-409e-9938-2a01cd707dc8\") " pod="openstack/openstackclient" Sep 30 12:37:50 crc kubenswrapper[5002]: I0930 12:37:50.408490 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/369a872a-8dd2-409e-9938-2a01cd707dc8-openstack-config\") pod \"openstackclient\" (UID: \"369a872a-8dd2-409e-9938-2a01cd707dc8\") " pod="openstack/openstackclient" Sep 30 12:37:50 crc kubenswrapper[5002]: I0930 12:37:50.429283 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/295327d8-7973-4826-a8ab-34dcf2f4b5d5-logs\") pod \"barbican-worker-5976c988bc-j7gh5\" (UID: \"295327d8-7973-4826-a8ab-34dcf2f4b5d5\") " pod="openstack/barbican-worker-5976c988bc-j7gh5" Sep 30 12:37:50 crc kubenswrapper[5002]: I0930 12:37:50.430016 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: 
\"kubernetes.io/empty-dir/fc432228-1ae9-4a76-a81f-a8a7d2d44492-logs\") pod \"barbican-keystone-listener-64bf5676fd-f8zrr\" (UID: \"fc432228-1ae9-4a76-a81f-a8a7d2d44492\") " pod="openstack/barbican-keystone-listener-64bf5676fd-f8zrr" Sep 30 12:37:50 crc kubenswrapper[5002]: I0930 12:37:50.431939 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fc432228-1ae9-4a76-a81f-a8a7d2d44492-config-data\") pod \"barbican-keystone-listener-64bf5676fd-f8zrr\" (UID: \"fc432228-1ae9-4a76-a81f-a8a7d2d44492\") " pod="openstack/barbican-keystone-listener-64bf5676fd-f8zrr" Sep 30 12:37:50 crc kubenswrapper[5002]: I0930 12:37:50.456987 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6z7nb\" (UniqueName: \"kubernetes.io/projected/295327d8-7973-4826-a8ab-34dcf2f4b5d5-kube-api-access-6z7nb\") pod \"barbican-worker-5976c988bc-j7gh5\" (UID: \"295327d8-7973-4826-a8ab-34dcf2f4b5d5\") " pod="openstack/barbican-worker-5976c988bc-j7gh5" Sep 30 12:37:50 crc kubenswrapper[5002]: I0930 12:37:50.467828 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-thmfc\" (UniqueName: \"kubernetes.io/projected/fc432228-1ae9-4a76-a81f-a8a7d2d44492-kube-api-access-thmfc\") pod \"barbican-keystone-listener-64bf5676fd-f8zrr\" (UID: \"fc432228-1ae9-4a76-a81f-a8a7d2d44492\") " pod="openstack/barbican-keystone-listener-64bf5676fd-f8zrr" Sep 30 12:37:50 crc kubenswrapper[5002]: I0930 12:37:50.474993 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r6m2l\" (UniqueName: \"kubernetes.io/projected/369a872a-8dd2-409e-9938-2a01cd707dc8-kube-api-access-r6m2l\") pod \"openstackclient\" (UID: \"369a872a-8dd2-409e-9938-2a01cd707dc8\") " pod="openstack/openstackclient" Sep 30 12:37:50 crc kubenswrapper[5002]: I0930 12:37:50.489027 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/fc432228-1ae9-4a76-a81f-a8a7d2d44492-config-data-custom\") pod \"barbican-keystone-listener-64bf5676fd-f8zrr\" (UID: \"fc432228-1ae9-4a76-a81f-a8a7d2d44492\") " pod="openstack/barbican-keystone-listener-64bf5676fd-f8zrr" Sep 30 12:37:50 crc kubenswrapper[5002]: I0930 12:37:50.489531 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/295327d8-7973-4826-a8ab-34dcf2f4b5d5-config-data-custom\") pod \"barbican-worker-5976c988bc-j7gh5\" (UID: \"295327d8-7973-4826-a8ab-34dcf2f4b5d5\") " pod="openstack/barbican-worker-5976c988bc-j7gh5" Sep 30 12:37:50 crc kubenswrapper[5002]: I0930 12:37:50.492194 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/369a872a-8dd2-409e-9938-2a01cd707dc8-openstack-config-secret\") pod \"openstackclient\" (UID: \"369a872a-8dd2-409e-9938-2a01cd707dc8\") " pod="openstack/openstackclient" Sep 30 12:37:50 crc kubenswrapper[5002]: I0930 12:37:50.492658 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fc432228-1ae9-4a76-a81f-a8a7d2d44492-combined-ca-bundle\") pod \"barbican-keystone-listener-64bf5676fd-f8zrr\" (UID: \"fc432228-1ae9-4a76-a81f-a8a7d2d44492\") " pod="openstack/barbican-keystone-listener-64bf5676fd-f8zrr" Sep 30 12:37:50 crc kubenswrapper[5002]: I0930 12:37:50.497972 5002 operation_generator.go:637] "MountVolume.SetUp succeeded 
for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/295327d8-7973-4826-a8ab-34dcf2f4b5d5-config-data\") pod \"barbican-worker-5976c988bc-j7gh5\" (UID: \"295327d8-7973-4826-a8ab-34dcf2f4b5d5\") " pod="openstack/barbican-worker-5976c988bc-j7gh5" Sep 30 12:37:50 crc kubenswrapper[5002]: I0930 12:37:50.500656 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/369a872a-8dd2-409e-9938-2a01cd707dc8-combined-ca-bundle\") pod \"openstackclient\" (UID: \"369a872a-8dd2-409e-9938-2a01cd707dc8\") " pod="openstack/openstackclient" Sep 30 12:37:50 crc kubenswrapper[5002]: I0930 12:37:50.500788 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/295327d8-7973-4826-a8ab-34dcf2f4b5d5-combined-ca-bundle\") pod \"barbican-worker-5976c988bc-j7gh5\" (UID: \"295327d8-7973-4826-a8ab-34dcf2f4b5d5\") " pod="openstack/barbican-worker-5976c988bc-j7gh5" Sep 30 12:37:50 crc kubenswrapper[5002]: I0930 12:37:50.505888 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-keystone-listener-64bf5676fd-f8zrr" Sep 30 12:37:50 crc kubenswrapper[5002]: I0930 12:37:50.509642 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-59d5ff467f-kv9dn"] Sep 30 12:37:50 crc kubenswrapper[5002]: I0930 12:37:50.519589 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-59d5ff467f-kv9dn" Sep 30 12:37:50 crc kubenswrapper[5002]: I0930 12:37:50.556728 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-59d5ff467f-kv9dn"] Sep 30 12:37:50 crc kubenswrapper[5002]: I0930 12:37:50.584732 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-86cd8676db-mltgz"] Sep 30 12:37:50 crc kubenswrapper[5002]: I0930 12:37:50.586323 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-86cd8676db-mltgz" Sep 30 12:37:50 crc kubenswrapper[5002]: I0930 12:37:50.588250 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-api-config-data" Sep 30 12:37:50 crc kubenswrapper[5002]: I0930 12:37:50.593467 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-86cd8676db-mltgz"] Sep 30 12:37:50 crc kubenswrapper[5002]: I0930 12:37:50.600063 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/beec5517-fd48-4017-9cb3-1102001e9439-config-data\") pod \"barbican-api-86cd8676db-mltgz\" (UID: \"beec5517-fd48-4017-9cb3-1102001e9439\") " pod="openstack/barbican-api-86cd8676db-mltgz" Sep 30 12:37:50 crc kubenswrapper[5002]: I0930 12:37:50.600162 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/beec5517-fd48-4017-9cb3-1102001e9439-logs\") pod \"barbican-api-86cd8676db-mltgz\" (UID: \"beec5517-fd48-4017-9cb3-1102001e9439\") " pod="openstack/barbican-api-86cd8676db-mltgz" Sep 30 12:37:50 crc kubenswrapper[5002]: I0930 12:37:50.600202 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/87fd4f9e-e9c3-4427-884e-44dc39055ab4-dns-swift-storage-0\") pod \"dnsmasq-dns-59d5ff467f-kv9dn\" (UID: \"87fd4f9e-e9c3-4427-884e-44dc39055ab4\") " pod="openstack/dnsmasq-dns-59d5ff467f-kv9dn" Sep 30 12:37:50 crc kubenswrapper[5002]: I0930 12:37:50.600250 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/beec5517-fd48-4017-9cb3-1102001e9439-combined-ca-bundle\") pod \"barbican-api-86cd8676db-mltgz\" (UID: \"beec5517-fd48-4017-9cb3-1102001e9439\") " pod="openstack/barbican-api-86cd8676db-mltgz" Sep 30 12:37:50 crc kubenswrapper[5002]: I0930 12:37:50.600275 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/87fd4f9e-e9c3-4427-884e-44dc39055ab4-ovsdbserver-nb\") pod \"dnsmasq-dns-59d5ff467f-kv9dn\" (UID: \"87fd4f9e-e9c3-4427-884e-44dc39055ab4\") " pod="openstack/dnsmasq-dns-59d5ff467f-kv9dn" Sep 30 12:37:50 crc kubenswrapper[5002]: I0930 12:37:50.600300 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ffchm\" (UniqueName: \"kubernetes.io/projected/87fd4f9e-e9c3-4427-884e-44dc39055ab4-kube-api-access-ffchm\") pod \"dnsmasq-dns-59d5ff467f-kv9dn\" (UID: \"87fd4f9e-e9c3-4427-884e-44dc39055ab4\") " pod="openstack/dnsmasq-dns-59d5ff467f-kv9dn" Sep 30 12:37:50 crc kubenswrapper[5002]: I0930 12:37:50.600349 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/87fd4f9e-e9c3-4427-884e-44dc39055ab4-ovsdbserver-sb\") pod \"dnsmasq-dns-59d5ff467f-kv9dn\" (UID: \"87fd4f9e-e9c3-4427-884e-44dc39055ab4\") " pod="openstack/dnsmasq-dns-59d5ff467f-kv9dn" Sep 30 12:37:50 crc kubenswrapper[5002]: I0930 12:37:50.600392 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7vn8p\" (UniqueName: 
\"kubernetes.io/projected/beec5517-fd48-4017-9cb3-1102001e9439-kube-api-access-7vn8p\") pod \"barbican-api-86cd8676db-mltgz\" (UID: \"beec5517-fd48-4017-9cb3-1102001e9439\") " pod="openstack/barbican-api-86cd8676db-mltgz" Sep 30 12:37:50 crc kubenswrapper[5002]: I0930 12:37:50.600413 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/beec5517-fd48-4017-9cb3-1102001e9439-config-data-custom\") pod \"barbican-api-86cd8676db-mltgz\" (UID: \"beec5517-fd48-4017-9cb3-1102001e9439\") " pod="openstack/barbican-api-86cd8676db-mltgz" Sep 30 12:37:50 crc kubenswrapper[5002]: I0930 12:37:50.600438 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/87fd4f9e-e9c3-4427-884e-44dc39055ab4-config\") pod \"dnsmasq-dns-59d5ff467f-kv9dn\" (UID: \"87fd4f9e-e9c3-4427-884e-44dc39055ab4\") " pod="openstack/dnsmasq-dns-59d5ff467f-kv9dn" Sep 30 12:37:50 crc kubenswrapper[5002]: I0930 12:37:50.600491 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/87fd4f9e-e9c3-4427-884e-44dc39055ab4-dns-svc\") pod \"dnsmasq-dns-59d5ff467f-kv9dn\" (UID: \"87fd4f9e-e9c3-4427-884e-44dc39055ab4\") " pod="openstack/dnsmasq-dns-59d5ff467f-kv9dn" Sep 30 12:37:50 crc kubenswrapper[5002]: I0930 12:37:50.681951 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-worker-5976c988bc-j7gh5" Sep 30 12:37:50 crc kubenswrapper[5002]: I0930 12:37:50.701201 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ffchm\" (UniqueName: \"kubernetes.io/projected/87fd4f9e-e9c3-4427-884e-44dc39055ab4-kube-api-access-ffchm\") pod \"dnsmasq-dns-59d5ff467f-kv9dn\" (UID: \"87fd4f9e-e9c3-4427-884e-44dc39055ab4\") " pod="openstack/dnsmasq-dns-59d5ff467f-kv9dn" Sep 30 12:37:50 crc kubenswrapper[5002]: I0930 12:37:50.701255 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/87fd4f9e-e9c3-4427-884e-44dc39055ab4-ovsdbserver-sb\") pod \"dnsmasq-dns-59d5ff467f-kv9dn\" (UID: \"87fd4f9e-e9c3-4427-884e-44dc39055ab4\") " pod="openstack/dnsmasq-dns-59d5ff467f-kv9dn" Sep 30 12:37:50 crc kubenswrapper[5002]: I0930 12:37:50.701291 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7vn8p\" (UniqueName: \"kubernetes.io/projected/beec5517-fd48-4017-9cb3-1102001e9439-kube-api-access-7vn8p\") pod \"barbican-api-86cd8676db-mltgz\" (UID: \"beec5517-fd48-4017-9cb3-1102001e9439\") " pod="openstack/barbican-api-86cd8676db-mltgz" Sep 30 12:37:50 crc kubenswrapper[5002]: I0930 12:37:50.701307 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/beec5517-fd48-4017-9cb3-1102001e9439-config-data-custom\") pod \"barbican-api-86cd8676db-mltgz\" (UID: \"beec5517-fd48-4017-9cb3-1102001e9439\") " pod="openstack/barbican-api-86cd8676db-mltgz" Sep 30 12:37:50 crc kubenswrapper[5002]: I0930 12:37:50.701327 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/87fd4f9e-e9c3-4427-884e-44dc39055ab4-config\") pod \"dnsmasq-dns-59d5ff467f-kv9dn\" (UID: \"87fd4f9e-e9c3-4427-884e-44dc39055ab4\") " 
pod="openstack/dnsmasq-dns-59d5ff467f-kv9dn" Sep 30 12:37:50 crc kubenswrapper[5002]: I0930 12:37:50.701352 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/87fd4f9e-e9c3-4427-884e-44dc39055ab4-dns-svc\") pod \"dnsmasq-dns-59d5ff467f-kv9dn\" (UID: \"87fd4f9e-e9c3-4427-884e-44dc39055ab4\") " pod="openstack/dnsmasq-dns-59d5ff467f-kv9dn" Sep 30 12:37:50 crc kubenswrapper[5002]: I0930 12:37:50.701374 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/beec5517-fd48-4017-9cb3-1102001e9439-config-data\") pod \"barbican-api-86cd8676db-mltgz\" (UID: \"beec5517-fd48-4017-9cb3-1102001e9439\") " pod="openstack/barbican-api-86cd8676db-mltgz" Sep 30 12:37:50 crc kubenswrapper[5002]: I0930 12:37:50.701422 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/beec5517-fd48-4017-9cb3-1102001e9439-logs\") pod \"barbican-api-86cd8676db-mltgz\" (UID: \"beec5517-fd48-4017-9cb3-1102001e9439\") " pod="openstack/barbican-api-86cd8676db-mltgz" Sep 30 12:37:50 crc kubenswrapper[5002]: I0930 12:37:50.701448 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/87fd4f9e-e9c3-4427-884e-44dc39055ab4-dns-swift-storage-0\") pod \"dnsmasq-dns-59d5ff467f-kv9dn\" (UID: \"87fd4f9e-e9c3-4427-884e-44dc39055ab4\") " pod="openstack/dnsmasq-dns-59d5ff467f-kv9dn" Sep 30 12:37:50 crc kubenswrapper[5002]: I0930 12:37:50.701466 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/beec5517-fd48-4017-9cb3-1102001e9439-combined-ca-bundle\") pod \"barbican-api-86cd8676db-mltgz\" (UID: \"beec5517-fd48-4017-9cb3-1102001e9439\") " pod="openstack/barbican-api-86cd8676db-mltgz" Sep 30 12:37:50 crc kubenswrapper[5002]: I0930 12:37:50.701514 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/87fd4f9e-e9c3-4427-884e-44dc39055ab4-ovsdbserver-nb\") pod \"dnsmasq-dns-59d5ff467f-kv9dn\" (UID: \"87fd4f9e-e9c3-4427-884e-44dc39055ab4\") " pod="openstack/dnsmasq-dns-59d5ff467f-kv9dn" Sep 30 12:37:50 crc kubenswrapper[5002]: I0930 12:37:50.702362 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/87fd4f9e-e9c3-4427-884e-44dc39055ab4-ovsdbserver-nb\") pod \"dnsmasq-dns-59d5ff467f-kv9dn\" (UID: \"87fd4f9e-e9c3-4427-884e-44dc39055ab4\") " pod="openstack/dnsmasq-dns-59d5ff467f-kv9dn" Sep 30 12:37:50 crc kubenswrapper[5002]: I0930 12:37:50.703164 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/87fd4f9e-e9c3-4427-884e-44dc39055ab4-ovsdbserver-sb\") pod \"dnsmasq-dns-59d5ff467f-kv9dn\" (UID: \"87fd4f9e-e9c3-4427-884e-44dc39055ab4\") " pod="openstack/dnsmasq-dns-59d5ff467f-kv9dn" Sep 30 12:37:50 crc kubenswrapper[5002]: I0930 12:37:50.707916 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/87fd4f9e-e9c3-4427-884e-44dc39055ab4-dns-swift-storage-0\") pod \"dnsmasq-dns-59d5ff467f-kv9dn\" (UID: \"87fd4f9e-e9c3-4427-884e-44dc39055ab4\") " pod="openstack/dnsmasq-dns-59d5ff467f-kv9dn" Sep 30 12:37:50 crc kubenswrapper[5002]: 
I0930 12:37:50.708363 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/87fd4f9e-e9c3-4427-884e-44dc39055ab4-config\") pod \"dnsmasq-dns-59d5ff467f-kv9dn\" (UID: \"87fd4f9e-e9c3-4427-884e-44dc39055ab4\") " pod="openstack/dnsmasq-dns-59d5ff467f-kv9dn" Sep 30 12:37:50 crc kubenswrapper[5002]: I0930 12:37:50.708815 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/beec5517-fd48-4017-9cb3-1102001e9439-logs\") pod \"barbican-api-86cd8676db-mltgz\" (UID: \"beec5517-fd48-4017-9cb3-1102001e9439\") " pod="openstack/barbican-api-86cd8676db-mltgz" Sep 30 12:37:50 crc kubenswrapper[5002]: I0930 12:37:50.708859 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/87fd4f9e-e9c3-4427-884e-44dc39055ab4-dns-svc\") pod \"dnsmasq-dns-59d5ff467f-kv9dn\" (UID: \"87fd4f9e-e9c3-4427-884e-44dc39055ab4\") " pod="openstack/dnsmasq-dns-59d5ff467f-kv9dn" Sep 30 12:37:50 crc kubenswrapper[5002]: I0930 12:37:50.709549 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/beec5517-fd48-4017-9cb3-1102001e9439-config-data-custom\") pod \"barbican-api-86cd8676db-mltgz\" (UID: \"beec5517-fd48-4017-9cb3-1102001e9439\") " pod="openstack/barbican-api-86cd8676db-mltgz" Sep 30 12:37:50 crc kubenswrapper[5002]: I0930 12:37:50.710059 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/beec5517-fd48-4017-9cb3-1102001e9439-config-data\") pod \"barbican-api-86cd8676db-mltgz\" (UID: \"beec5517-fd48-4017-9cb3-1102001e9439\") " pod="openstack/barbican-api-86cd8676db-mltgz" Sep 30 12:37:50 crc kubenswrapper[5002]: I0930 12:37:50.717648 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/beec5517-fd48-4017-9cb3-1102001e9439-combined-ca-bundle\") pod \"barbican-api-86cd8676db-mltgz\" (UID: \"beec5517-fd48-4017-9cb3-1102001e9439\") " pod="openstack/barbican-api-86cd8676db-mltgz" Sep 30 12:37:50 crc kubenswrapper[5002]: I0930 12:37:50.724291 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ffchm\" (UniqueName: \"kubernetes.io/projected/87fd4f9e-e9c3-4427-884e-44dc39055ab4-kube-api-access-ffchm\") pod \"dnsmasq-dns-59d5ff467f-kv9dn\" (UID: \"87fd4f9e-e9c3-4427-884e-44dc39055ab4\") " pod="openstack/dnsmasq-dns-59d5ff467f-kv9dn" Sep 30 12:37:50 crc kubenswrapper[5002]: I0930 12:37:50.725200 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7vn8p\" (UniqueName: \"kubernetes.io/projected/beec5517-fd48-4017-9cb3-1102001e9439-kube-api-access-7vn8p\") pod \"barbican-api-86cd8676db-mltgz\" (UID: \"beec5517-fd48-4017-9cb3-1102001e9439\") " pod="openstack/barbican-api-86cd8676db-mltgz" Sep 30 12:37:50 crc kubenswrapper[5002]: I0930 12:37:50.764342 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Sep 30 12:37:50 crc kubenswrapper[5002]: I0930 12:37:50.844859 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-59d5ff467f-kv9dn" Sep 30 12:37:50 crc kubenswrapper[5002]: I0930 12:37:50.880121 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstackclient" Sep 30 12:37:50 crc kubenswrapper[5002]: I0930 12:37:50.893059 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Sep 30 12:37:50 crc kubenswrapper[5002]: I0930 12:37:50.895914 5002 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openstack/openstackclient" oldPodUID="7f7eb2da-349a-49e9-9e42-7e61b36682f0" podUID="369a872a-8dd2-409e-9938-2a01cd707dc8" Sep 30 12:37:50 crc kubenswrapper[5002]: I0930 12:37:50.902234 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-86cd8676db-mltgz" Sep 30 12:37:50 crc kubenswrapper[5002]: I0930 12:37:50.903623 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/7f7eb2da-349a-49e9-9e42-7e61b36682f0-openstack-config-secret\") pod \"7f7eb2da-349a-49e9-9e42-7e61b36682f0\" (UID: \"7f7eb2da-349a-49e9-9e42-7e61b36682f0\") " Sep 30 12:37:50 crc kubenswrapper[5002]: I0930 12:37:50.903690 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7f7eb2da-349a-49e9-9e42-7e61b36682f0-combined-ca-bundle\") pod \"7f7eb2da-349a-49e9-9e42-7e61b36682f0\" (UID: \"7f7eb2da-349a-49e9-9e42-7e61b36682f0\") " Sep 30 12:37:50 crc kubenswrapper[5002]: I0930 12:37:50.903722 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5g4z2\" (UniqueName: \"kubernetes.io/projected/7f7eb2da-349a-49e9-9e42-7e61b36682f0-kube-api-access-5g4z2\") pod \"7f7eb2da-349a-49e9-9e42-7e61b36682f0\" (UID: \"7f7eb2da-349a-49e9-9e42-7e61b36682f0\") " Sep 30 12:37:50 crc kubenswrapper[5002]: I0930 12:37:50.903785 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/7f7eb2da-349a-49e9-9e42-7e61b36682f0-openstack-config\") pod \"7f7eb2da-349a-49e9-9e42-7e61b36682f0\" (UID: \"7f7eb2da-349a-49e9-9e42-7e61b36682f0\") " Sep 30 12:37:50 crc kubenswrapper[5002]: I0930 12:37:50.909364 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7f7eb2da-349a-49e9-9e42-7e61b36682f0-openstack-config-secret" (OuterVolumeSpecName: "openstack-config-secret") pod "7f7eb2da-349a-49e9-9e42-7e61b36682f0" (UID: "7f7eb2da-349a-49e9-9e42-7e61b36682f0"). InnerVolumeSpecName "openstack-config-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:37:50 crc kubenswrapper[5002]: I0930 12:37:50.909883 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7f7eb2da-349a-49e9-9e42-7e61b36682f0-openstack-config" (OuterVolumeSpecName: "openstack-config") pod "7f7eb2da-349a-49e9-9e42-7e61b36682f0" (UID: "7f7eb2da-349a-49e9-9e42-7e61b36682f0"). InnerVolumeSpecName "openstack-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 12:37:50 crc kubenswrapper[5002]: I0930 12:37:50.914860 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7f7eb2da-349a-49e9-9e42-7e61b36682f0-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7f7eb2da-349a-49e9-9e42-7e61b36682f0" (UID: "7f7eb2da-349a-49e9-9e42-7e61b36682f0"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:37:50 crc kubenswrapper[5002]: I0930 12:37:50.914949 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7f7eb2da-349a-49e9-9e42-7e61b36682f0-kube-api-access-5g4z2" (OuterVolumeSpecName: "kube-api-access-5g4z2") pod "7f7eb2da-349a-49e9-9e42-7e61b36682f0" (UID: "7f7eb2da-349a-49e9-9e42-7e61b36682f0"). InnerVolumeSpecName "kube-api-access-5g4z2". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 12:37:51 crc kubenswrapper[5002]: I0930 12:37:51.005232 5002 reconciler_common.go:293] "Volume detached for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/7f7eb2da-349a-49e9-9e42-7e61b36682f0-openstack-config-secret\") on node \"crc\" DevicePath \"\"" Sep 30 12:37:51 crc kubenswrapper[5002]: I0930 12:37:51.005269 5002 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7f7eb2da-349a-49e9-9e42-7e61b36682f0-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 12:37:51 crc kubenswrapper[5002]: I0930 12:37:51.005278 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5g4z2\" (UniqueName: \"kubernetes.io/projected/7f7eb2da-349a-49e9-9e42-7e61b36682f0-kube-api-access-5g4z2\") on node \"crc\" DevicePath \"\"" Sep 30 12:37:51 crc kubenswrapper[5002]: I0930 12:37:51.005286 5002 reconciler_common.go:293] "Volume detached for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/7f7eb2da-349a-49e9-9e42-7e61b36682f0-openstack-config\") on node \"crc\" DevicePath \"\"" Sep 30 12:37:51 crc kubenswrapper[5002]: I0930 12:37:51.070207 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-64bf5676fd-f8zrr"] Sep 30 12:37:51 crc kubenswrapper[5002]: I0930 12:37:51.281727 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-5976c988bc-j7gh5"] Sep 30 12:37:51 crc kubenswrapper[5002]: I0930 12:37:51.381420 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Sep 30 12:37:51 crc kubenswrapper[5002]: I0930 12:37:51.391046 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-sync-qwvj9" Sep 30 12:37:51 crc kubenswrapper[5002]: I0930 12:37:51.501504 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-59d5ff467f-kv9dn"] Sep 30 12:37:51 crc kubenswrapper[5002]: W0930 12:37:51.508009 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod87fd4f9e_e9c3_4427_884e_44dc39055ab4.slice/crio-2b5d02346c239a5849fb819ef3820e81647dc04d6468f8593bddd651dc54d9bb WatchSource:0}: Error finding container 2b5d02346c239a5849fb819ef3820e81647dc04d6468f8593bddd651dc54d9bb: Status 404 returned error can't find the container with id 2b5d02346c239a5849fb819ef3820e81647dc04d6468f8593bddd651dc54d9bb Sep 30 12:37:51 crc kubenswrapper[5002]: I0930 12:37:51.517027 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-42xst\" (UniqueName: \"kubernetes.io/projected/d52650cc-2190-4b6f-8a3f-f506262075d8-kube-api-access-42xst\") pod \"d52650cc-2190-4b6f-8a3f-f506262075d8\" (UID: \"d52650cc-2190-4b6f-8a3f-f506262075d8\") " Sep 30 12:37:51 crc kubenswrapper[5002]: I0930 12:37:51.517177 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d52650cc-2190-4b6f-8a3f-f506262075d8-combined-ca-bundle\") pod \"d52650cc-2190-4b6f-8a3f-f506262075d8\" (UID: \"d52650cc-2190-4b6f-8a3f-f506262075d8\") " Sep 30 12:37:51 crc kubenswrapper[5002]: I0930 12:37:51.517262 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/d52650cc-2190-4b6f-8a3f-f506262075d8-config\") pod \"d52650cc-2190-4b6f-8a3f-f506262075d8\" (UID: \"d52650cc-2190-4b6f-8a3f-f506262075d8\") " Sep 30 12:37:51 crc kubenswrapper[5002]: I0930 12:37:51.521377 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d52650cc-2190-4b6f-8a3f-f506262075d8-kube-api-access-42xst" (OuterVolumeSpecName: "kube-api-access-42xst") pod "d52650cc-2190-4b6f-8a3f-f506262075d8" (UID: "d52650cc-2190-4b6f-8a3f-f506262075d8"). InnerVolumeSpecName "kube-api-access-42xst". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 12:37:51 crc kubenswrapper[5002]: I0930 12:37:51.551667 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d52650cc-2190-4b6f-8a3f-f506262075d8-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d52650cc-2190-4b6f-8a3f-f506262075d8" (UID: "d52650cc-2190-4b6f-8a3f-f506262075d8"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:37:51 crc kubenswrapper[5002]: I0930 12:37:51.609443 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d52650cc-2190-4b6f-8a3f-f506262075d8-config" (OuterVolumeSpecName: "config") pod "d52650cc-2190-4b6f-8a3f-f506262075d8" (UID: "d52650cc-2190-4b6f-8a3f-f506262075d8"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:37:51 crc kubenswrapper[5002]: I0930 12:37:51.621056 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-42xst\" (UniqueName: \"kubernetes.io/projected/d52650cc-2190-4b6f-8a3f-f506262075d8-kube-api-access-42xst\") on node \"crc\" DevicePath \"\"" Sep 30 12:37:51 crc kubenswrapper[5002]: I0930 12:37:51.621089 5002 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d52650cc-2190-4b6f-8a3f-f506262075d8-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 12:37:51 crc kubenswrapper[5002]: I0930 12:37:51.621099 5002 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/d52650cc-2190-4b6f-8a3f-f506262075d8-config\") on node \"crc\" DevicePath \"\"" Sep 30 12:37:51 crc kubenswrapper[5002]: I0930 12:37:51.651089 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-86cd8676db-mltgz"] Sep 30 12:37:51 crc kubenswrapper[5002]: W0930 12:37:51.657678 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podbeec5517_fd48_4017_9cb3_1102001e9439.slice/crio-6704f3167390e0d9317ad30e3d1665ea25c069e3e46c633a55eb726dca823457 WatchSource:0}: Error finding container 6704f3167390e0d9317ad30e3d1665ea25c069e3e46c633a55eb726dca823457: Status 404 returned error can't find the container with id 6704f3167390e0d9317ad30e3d1665ea25c069e3e46c633a55eb726dca823457 Sep 30 12:37:51 crc kubenswrapper[5002]: I0930 12:37:51.905494 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-qwvj9" event={"ID":"d52650cc-2190-4b6f-8a3f-f506262075d8","Type":"ContainerDied","Data":"9ffa9a9f49b70ab244e75af3ed34c8db009cbefcc0d031929dfd1065d975d8b9"} Sep 30 12:37:51 crc kubenswrapper[5002]: I0930 12:37:51.905791 5002 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9ffa9a9f49b70ab244e75af3ed34c8db009cbefcc0d031929dfd1065d975d8b9" Sep 30 12:37:51 crc kubenswrapper[5002]: I0930 12:37:51.905882 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-sync-qwvj9" Sep 30 12:37:51 crc kubenswrapper[5002]: I0930 12:37:51.915864 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-5976c988bc-j7gh5" event={"ID":"295327d8-7973-4826-a8ab-34dcf2f4b5d5","Type":"ContainerStarted","Data":"cce91d1f574c75e35236f48c067ed44c6e64b3cf7e130e7670f7a2b58d4892d4"} Sep 30 12:37:51 crc kubenswrapper[5002]: I0930 12:37:51.923209 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-59d5ff467f-kv9dn" event={"ID":"87fd4f9e-e9c3-4427-884e-44dc39055ab4","Type":"ContainerDied","Data":"bb38557ce6c2a3dde0cbb5ce531c3a5a07ede5241b7111e3b74c8ba77a757510"} Sep 30 12:37:51 crc kubenswrapper[5002]: I0930 12:37:51.922531 5002 generic.go:334] "Generic (PLEG): container finished" podID="87fd4f9e-e9c3-4427-884e-44dc39055ab4" containerID="bb38557ce6c2a3dde0cbb5ce531c3a5a07ede5241b7111e3b74c8ba77a757510" exitCode=0 Sep 30 12:37:51 crc kubenswrapper[5002]: I0930 12:37:51.927012 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-59d5ff467f-kv9dn" event={"ID":"87fd4f9e-e9c3-4427-884e-44dc39055ab4","Type":"ContainerStarted","Data":"2b5d02346c239a5849fb819ef3820e81647dc04d6468f8593bddd651dc54d9bb"} Sep 30 12:37:51 crc kubenswrapper[5002]: I0930 12:37:51.944059 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-86cd8676db-mltgz" event={"ID":"beec5517-fd48-4017-9cb3-1102001e9439","Type":"ContainerStarted","Data":"6704f3167390e0d9317ad30e3d1665ea25c069e3e46c633a55eb726dca823457"} Sep 30 12:37:51 crc kubenswrapper[5002]: I0930 12:37:51.946444 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"369a872a-8dd2-409e-9938-2a01cd707dc8","Type":"ContainerStarted","Data":"1837c52b273fb56bebcf523432b81c3329c1323f0e03857c6a26629ef8950fd7"} Sep 30 12:37:51 crc kubenswrapper[5002]: I0930 12:37:51.950804 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Sep 30 12:37:51 crc kubenswrapper[5002]: I0930 12:37:51.952912 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-64bf5676fd-f8zrr" event={"ID":"fc432228-1ae9-4a76-a81f-a8a7d2d44492","Type":"ContainerStarted","Data":"02779d91864e85b2946039d14d79109022cc0870b282c7762d31943dfae52ab7"} Sep 30 12:37:52 crc kubenswrapper[5002]: I0930 12:37:52.050370 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-59d5ff467f-kv9dn"] Sep 30 12:37:52 crc kubenswrapper[5002]: I0930 12:37:52.105593 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-75c8ddd69c-zfwrs"] Sep 30 12:37:52 crc kubenswrapper[5002]: E0930 12:37:52.105990 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d52650cc-2190-4b6f-8a3f-f506262075d8" containerName="neutron-db-sync" Sep 30 12:37:52 crc kubenswrapper[5002]: I0930 12:37:52.106004 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="d52650cc-2190-4b6f-8a3f-f506262075d8" containerName="neutron-db-sync" Sep 30 12:37:52 crc kubenswrapper[5002]: I0930 12:37:52.106690 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="d52650cc-2190-4b6f-8a3f-f506262075d8" containerName="neutron-db-sync" Sep 30 12:37:52 crc kubenswrapper[5002]: I0930 12:37:52.109421 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-75c8ddd69c-zfwrs" Sep 30 12:37:52 crc kubenswrapper[5002]: I0930 12:37:52.144725 5002 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openstack/openstackclient" oldPodUID="7f7eb2da-349a-49e9-9e42-7e61b36682f0" podUID="369a872a-8dd2-409e-9938-2a01cd707dc8" Sep 30 12:37:52 crc kubenswrapper[5002]: I0930 12:37:52.163435 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-75c8ddd69c-zfwrs"] Sep 30 12:37:52 crc kubenswrapper[5002]: E0930 12:37:52.176817 5002 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd52650cc_2190_4b6f_8a3f_f506262075d8.slice/crio-9ffa9a9f49b70ab244e75af3ed34c8db009cbefcc0d031929dfd1065d975d8b9\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod87fd4f9e_e9c3_4427_884e_44dc39055ab4.slice/crio-conmon-bb38557ce6c2a3dde0cbb5ce531c3a5a07ede5241b7111e3b74c8ba77a757510.scope\": RecentStats: unable to find data in memory cache]" Sep 30 12:37:52 crc kubenswrapper[5002]: I0930 12:37:52.202642 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-784489f99d-nqwcr"] Sep 30 12:37:52 crc kubenswrapper[5002]: I0930 12:37:52.204341 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-784489f99d-nqwcr" Sep 30 12:37:52 crc kubenswrapper[5002]: I0930 12:37:52.209771 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Sep 30 12:37:52 crc kubenswrapper[5002]: I0930 12:37:52.210064 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config" Sep 30 12:37:52 crc kubenswrapper[5002]: I0930 12:37:52.210458 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-ovndbs" Sep 30 12:37:52 crc kubenswrapper[5002]: I0930 12:37:52.210555 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-8dlzq" Sep 30 12:37:52 crc kubenswrapper[5002]: I0930 12:37:52.215006 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-784489f99d-nqwcr"] Sep 30 12:37:52 crc kubenswrapper[5002]: I0930 12:37:52.261325 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/accc1f38-16e0-4b04-98d4-ebece5b81989-ovsdbserver-sb\") pod \"dnsmasq-dns-75c8ddd69c-zfwrs\" (UID: \"accc1f38-16e0-4b04-98d4-ebece5b81989\") " pod="openstack/dnsmasq-dns-75c8ddd69c-zfwrs" Sep 30 12:37:52 crc kubenswrapper[5002]: I0930 12:37:52.261509 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/accc1f38-16e0-4b04-98d4-ebece5b81989-dns-svc\") pod \"dnsmasq-dns-75c8ddd69c-zfwrs\" (UID: \"accc1f38-16e0-4b04-98d4-ebece5b81989\") " pod="openstack/dnsmasq-dns-75c8ddd69c-zfwrs" Sep 30 12:37:52 crc kubenswrapper[5002]: I0930 12:37:52.261547 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bxtcl\" (UniqueName: \"kubernetes.io/projected/accc1f38-16e0-4b04-98d4-ebece5b81989-kube-api-access-bxtcl\") pod \"dnsmasq-dns-75c8ddd69c-zfwrs\" (UID: \"accc1f38-16e0-4b04-98d4-ebece5b81989\") " pod="openstack/dnsmasq-dns-75c8ddd69c-zfwrs" 
Sep 30 12:37:52 crc kubenswrapper[5002]: I0930 12:37:52.261604 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/accc1f38-16e0-4b04-98d4-ebece5b81989-ovsdbserver-nb\") pod \"dnsmasq-dns-75c8ddd69c-zfwrs\" (UID: \"accc1f38-16e0-4b04-98d4-ebece5b81989\") " pod="openstack/dnsmasq-dns-75c8ddd69c-zfwrs" Sep 30 12:37:52 crc kubenswrapper[5002]: I0930 12:37:52.261629 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/accc1f38-16e0-4b04-98d4-ebece5b81989-config\") pod \"dnsmasq-dns-75c8ddd69c-zfwrs\" (UID: \"accc1f38-16e0-4b04-98d4-ebece5b81989\") " pod="openstack/dnsmasq-dns-75c8ddd69c-zfwrs" Sep 30 12:37:52 crc kubenswrapper[5002]: I0930 12:37:52.261806 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/accc1f38-16e0-4b04-98d4-ebece5b81989-dns-swift-storage-0\") pod \"dnsmasq-dns-75c8ddd69c-zfwrs\" (UID: \"accc1f38-16e0-4b04-98d4-ebece5b81989\") " pod="openstack/dnsmasq-dns-75c8ddd69c-zfwrs" Sep 30 12:37:52 crc kubenswrapper[5002]: I0930 12:37:52.364783 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/accc1f38-16e0-4b04-98d4-ebece5b81989-ovsdbserver-sb\") pod \"dnsmasq-dns-75c8ddd69c-zfwrs\" (UID: \"accc1f38-16e0-4b04-98d4-ebece5b81989\") " pod="openstack/dnsmasq-dns-75c8ddd69c-zfwrs" Sep 30 12:37:52 crc kubenswrapper[5002]: I0930 12:37:52.365289 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8dwlw\" (UniqueName: \"kubernetes.io/projected/10afbe7c-91df-4843-96b2-ad180a2b9cd1-kube-api-access-8dwlw\") pod \"neutron-784489f99d-nqwcr\" (UID: \"10afbe7c-91df-4843-96b2-ad180a2b9cd1\") " pod="openstack/neutron-784489f99d-nqwcr" Sep 30 12:37:52 crc kubenswrapper[5002]: I0930 12:37:52.365327 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/10afbe7c-91df-4843-96b2-ad180a2b9cd1-ovndb-tls-certs\") pod \"neutron-784489f99d-nqwcr\" (UID: \"10afbe7c-91df-4843-96b2-ad180a2b9cd1\") " pod="openstack/neutron-784489f99d-nqwcr" Sep 30 12:37:52 crc kubenswrapper[5002]: I0930 12:37:52.365408 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/accc1f38-16e0-4b04-98d4-ebece5b81989-dns-svc\") pod \"dnsmasq-dns-75c8ddd69c-zfwrs\" (UID: \"accc1f38-16e0-4b04-98d4-ebece5b81989\") " pod="openstack/dnsmasq-dns-75c8ddd69c-zfwrs" Sep 30 12:37:52 crc kubenswrapper[5002]: I0930 12:37:52.365451 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bxtcl\" (UniqueName: \"kubernetes.io/projected/accc1f38-16e0-4b04-98d4-ebece5b81989-kube-api-access-bxtcl\") pod \"dnsmasq-dns-75c8ddd69c-zfwrs\" (UID: \"accc1f38-16e0-4b04-98d4-ebece5b81989\") " pod="openstack/dnsmasq-dns-75c8ddd69c-zfwrs" Sep 30 12:37:52 crc kubenswrapper[5002]: I0930 12:37:52.365556 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/accc1f38-16e0-4b04-98d4-ebece5b81989-ovsdbserver-nb\") pod \"dnsmasq-dns-75c8ddd69c-zfwrs\" (UID: \"accc1f38-16e0-4b04-98d4-ebece5b81989\") " 
pod="openstack/dnsmasq-dns-75c8ddd69c-zfwrs" Sep 30 12:37:52 crc kubenswrapper[5002]: I0930 12:37:52.365594 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/accc1f38-16e0-4b04-98d4-ebece5b81989-config\") pod \"dnsmasq-dns-75c8ddd69c-zfwrs\" (UID: \"accc1f38-16e0-4b04-98d4-ebece5b81989\") " pod="openstack/dnsmasq-dns-75c8ddd69c-zfwrs" Sep 30 12:37:52 crc kubenswrapper[5002]: I0930 12:37:52.365636 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/10afbe7c-91df-4843-96b2-ad180a2b9cd1-config\") pod \"neutron-784489f99d-nqwcr\" (UID: \"10afbe7c-91df-4843-96b2-ad180a2b9cd1\") " pod="openstack/neutron-784489f99d-nqwcr" Sep 30 12:37:52 crc kubenswrapper[5002]: I0930 12:37:52.365726 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/10afbe7c-91df-4843-96b2-ad180a2b9cd1-combined-ca-bundle\") pod \"neutron-784489f99d-nqwcr\" (UID: \"10afbe7c-91df-4843-96b2-ad180a2b9cd1\") " pod="openstack/neutron-784489f99d-nqwcr" Sep 30 12:37:52 crc kubenswrapper[5002]: I0930 12:37:52.365863 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/10afbe7c-91df-4843-96b2-ad180a2b9cd1-httpd-config\") pod \"neutron-784489f99d-nqwcr\" (UID: \"10afbe7c-91df-4843-96b2-ad180a2b9cd1\") " pod="openstack/neutron-784489f99d-nqwcr" Sep 30 12:37:52 crc kubenswrapper[5002]: I0930 12:37:52.365935 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/accc1f38-16e0-4b04-98d4-ebece5b81989-dns-swift-storage-0\") pod \"dnsmasq-dns-75c8ddd69c-zfwrs\" (UID: \"accc1f38-16e0-4b04-98d4-ebece5b81989\") " pod="openstack/dnsmasq-dns-75c8ddd69c-zfwrs" Sep 30 12:37:52 crc kubenswrapper[5002]: I0930 12:37:52.366180 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/accc1f38-16e0-4b04-98d4-ebece5b81989-ovsdbserver-sb\") pod \"dnsmasq-dns-75c8ddd69c-zfwrs\" (UID: \"accc1f38-16e0-4b04-98d4-ebece5b81989\") " pod="openstack/dnsmasq-dns-75c8ddd69c-zfwrs" Sep 30 12:37:52 crc kubenswrapper[5002]: I0930 12:37:52.366538 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/accc1f38-16e0-4b04-98d4-ebece5b81989-dns-svc\") pod \"dnsmasq-dns-75c8ddd69c-zfwrs\" (UID: \"accc1f38-16e0-4b04-98d4-ebece5b81989\") " pod="openstack/dnsmasq-dns-75c8ddd69c-zfwrs" Sep 30 12:37:52 crc kubenswrapper[5002]: I0930 12:37:52.366728 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/accc1f38-16e0-4b04-98d4-ebece5b81989-dns-swift-storage-0\") pod \"dnsmasq-dns-75c8ddd69c-zfwrs\" (UID: \"accc1f38-16e0-4b04-98d4-ebece5b81989\") " pod="openstack/dnsmasq-dns-75c8ddd69c-zfwrs" Sep 30 12:37:52 crc kubenswrapper[5002]: I0930 12:37:52.367187 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/accc1f38-16e0-4b04-98d4-ebece5b81989-config\") pod \"dnsmasq-dns-75c8ddd69c-zfwrs\" (UID: \"accc1f38-16e0-4b04-98d4-ebece5b81989\") " pod="openstack/dnsmasq-dns-75c8ddd69c-zfwrs" Sep 30 12:37:52 crc kubenswrapper[5002]: I0930 
12:37:52.368593 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/accc1f38-16e0-4b04-98d4-ebece5b81989-ovsdbserver-nb\") pod \"dnsmasq-dns-75c8ddd69c-zfwrs\" (UID: \"accc1f38-16e0-4b04-98d4-ebece5b81989\") " pod="openstack/dnsmasq-dns-75c8ddd69c-zfwrs"
Sep 30 12:37:52 crc kubenswrapper[5002]: I0930 12:37:52.387343 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bxtcl\" (UniqueName: \"kubernetes.io/projected/accc1f38-16e0-4b04-98d4-ebece5b81989-kube-api-access-bxtcl\") pod \"dnsmasq-dns-75c8ddd69c-zfwrs\" (UID: \"accc1f38-16e0-4b04-98d4-ebece5b81989\") " pod="openstack/dnsmasq-dns-75c8ddd69c-zfwrs"
Sep 30 12:37:52 crc kubenswrapper[5002]: E0930 12:37:52.443679 5002 log.go:32] "CreateContainer in sandbox from runtime service failed" err=<
Sep 30 12:37:52 crc kubenswrapper[5002]: 	rpc error: code = Unknown desc = container create failed: mount `/var/lib/kubelet/pods/87fd4f9e-e9c3-4427-884e-44dc39055ab4/volume-subpaths/dns-svc/dnsmasq-dns/1` to `etc/dnsmasq.d/hosts/dns-svc`: No such file or directory
Sep 30 12:37:52 crc kubenswrapper[5002]: > podSandboxID="2b5d02346c239a5849fb819ef3820e81647dc04d6468f8593bddd651dc54d9bb"
Sep 30 12:37:52 crc kubenswrapper[5002]: E0930 12:37:52.444000 5002 kuberuntime_manager.go:1274] "Unhandled Error" err=<
Sep 30 12:37:52 crc kubenswrapper[5002]: 	container &Container{Name:dnsmasq-dns,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n679h54h99h9h97h8ch64h67fh6h9bhdh55bh555h694h5bbh87hc8h699h5bbh9fh5bbh59h6ch644h5f4h67hd5h59dh54dh5dfh64h59bq,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-swift-storage-0,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-swift-storage-0,SubPath:dns-swift-storage-0,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ovsdbserver-nb,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/ovsdbserver-nb,SubPath:ovsdbserver-nb,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ovsdbserver-sb,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/ovsdbserver-sb,SubPath:ovsdbserver-sb,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-ffchm,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:nil,TCPSocket:&TCPSocketAction{Port:{0 5353 },Host:,},GRPC:nil,},InitialDelaySeconds:3,TimeoutSeconds:5,PeriodSeconds:3,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:nil,TCPSocket:&TCPSocketAction{Port:{0 5353 },Host:,},GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:5,PeriodSeconds:5,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-59d5ff467f-kv9dn_openstack(87fd4f9e-e9c3-4427-884e-44dc39055ab4): CreateContainerError: container create failed: mount `/var/lib/kubelet/pods/87fd4f9e-e9c3-4427-884e-44dc39055ab4/volume-subpaths/dns-svc/dnsmasq-dns/1` to `etc/dnsmasq.d/hosts/dns-svc`: No such file or directory
Sep 30 12:37:52 crc kubenswrapper[5002]: > logger="UnhandledError"
Sep 30 12:37:52 crc kubenswrapper[5002]: E0930 12:37:52.445909 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"dnsmasq-dns\" with CreateContainerError: \"container create failed: mount `/var/lib/kubelet/pods/87fd4f9e-e9c3-4427-884e-44dc39055ab4/volume-subpaths/dns-svc/dnsmasq-dns/1` to `etc/dnsmasq.d/hosts/dns-svc`: No such file or directory\\n\"" pod="openstack/dnsmasq-dns-59d5ff467f-kv9dn" podUID="87fd4f9e-e9c3-4427-884e-44dc39055ab4"
Sep 30 12:37:52 crc kubenswrapper[5002]: I0930 12:37:52.469274 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/10afbe7c-91df-4843-96b2-ad180a2b9cd1-config\") pod \"neutron-784489f99d-nqwcr\" (UID: \"10afbe7c-91df-4843-96b2-ad180a2b9cd1\") " pod="openstack/neutron-784489f99d-nqwcr"
Sep 30 12:37:52 crc kubenswrapper[5002]: I0930 12:37:52.469363 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/10afbe7c-91df-4843-96b2-ad180a2b9cd1-combined-ca-bundle\") pod \"neutron-784489f99d-nqwcr\" (UID: \"10afbe7c-91df-4843-96b2-ad180a2b9cd1\") " pod="openstack/neutron-784489f99d-nqwcr"
Sep 30 12:37:52 crc kubenswrapper[5002]: I0930 12:37:52.469420 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/10afbe7c-91df-4843-96b2-ad180a2b9cd1-httpd-config\") pod \"neutron-784489f99d-nqwcr\" (UID: \"10afbe7c-91df-4843-96b2-ad180a2b9cd1\") " pod="openstack/neutron-784489f99d-nqwcr"
Sep 30 12:37:52 crc kubenswrapper[5002]: I0930 12:37:52.469510 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8dwlw\" (UniqueName: \"kubernetes.io/projected/10afbe7c-91df-4843-96b2-ad180a2b9cd1-kube-api-access-8dwlw\") pod \"neutron-784489f99d-nqwcr\" (UID: \"10afbe7c-91df-4843-96b2-ad180a2b9cd1\") " pod="openstack/neutron-784489f99d-nqwcr"
Sep 30 12:37:52 crc kubenswrapper[5002]: I0930 12:37:52.469534 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/10afbe7c-91df-4843-96b2-ad180a2b9cd1-ovndb-tls-certs\") pod \"neutron-784489f99d-nqwcr\" (UID: \"10afbe7c-91df-4843-96b2-ad180a2b9cd1\") " pod="openstack/neutron-784489f99d-nqwcr"
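[annotation] The CreateContainerError above is a subPath mount failure. For a volumeMount that sets SubPath, the kubelet first stages the file on the host under /var/lib/kubelet/pods/<pod-uid>/volume-subpaths/<volume>/<container>/<mount-index> (here .../87fd4f9e.../volume-subpaths/dns-svc/dnsmasq-dns/1), and the runtime then bind-mounts that staged path into the container rootfs; the relative-looking target `etc/dnsmasq.d/hosts/dns-svc` is resolved against the new root. The ENOENT is consistent with a replacement race: the entries that follow show this pod (dnsmasq-dns-59d5ff467f-kv9dn) being unmounted and deleted while its successor dnsmasq-dns-75c8ddd69c-zfwrs comes up, so the staged source was torn down mid-create. A minimal sketch of the mount shape involved, using the upstream k8s.io/api/core/v1 types with names copied from the spec dump above (illustrative only, not the operator's actual manifest):

    package main

    import (
        "fmt"

        corev1 "k8s.io/api/core/v1"
    )

    func main() {
        // SubPath mounts a single key ("dns-svc") of the volume rather than the
        // whole ConfigMap directory. It is this field that forces the kubelet to
        // stage the file under .../volume-subpaths/ before the runtime bind-mounts
        // it to MountPath inside the container.
        m := corev1.VolumeMount{
            Name:      "dns-svc",
            ReadOnly:  true,
            MountPath: "/etc/dnsmasq.d/hosts/dns-svc",
            SubPath:   "dns-svc",
        }
        fmt.Printf("%+v\n", m)
    }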
"operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/10afbe7c-91df-4843-96b2-ad180a2b9cd1-ovndb-tls-certs\") pod \"neutron-784489f99d-nqwcr\" (UID: \"10afbe7c-91df-4843-96b2-ad180a2b9cd1\") " pod="openstack/neutron-784489f99d-nqwcr" Sep 30 12:37:52 crc kubenswrapper[5002]: I0930 12:37:52.471510 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-75c8ddd69c-zfwrs" Sep 30 12:37:52 crc kubenswrapper[5002]: I0930 12:37:52.475156 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/10afbe7c-91df-4843-96b2-ad180a2b9cd1-httpd-config\") pod \"neutron-784489f99d-nqwcr\" (UID: \"10afbe7c-91df-4843-96b2-ad180a2b9cd1\") " pod="openstack/neutron-784489f99d-nqwcr" Sep 30 12:37:52 crc kubenswrapper[5002]: I0930 12:37:52.476170 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/10afbe7c-91df-4843-96b2-ad180a2b9cd1-ovndb-tls-certs\") pod \"neutron-784489f99d-nqwcr\" (UID: \"10afbe7c-91df-4843-96b2-ad180a2b9cd1\") " pod="openstack/neutron-784489f99d-nqwcr" Sep 30 12:37:52 crc kubenswrapper[5002]: I0930 12:37:52.476335 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/10afbe7c-91df-4843-96b2-ad180a2b9cd1-config\") pod \"neutron-784489f99d-nqwcr\" (UID: \"10afbe7c-91df-4843-96b2-ad180a2b9cd1\") " pod="openstack/neutron-784489f99d-nqwcr" Sep 30 12:37:52 crc kubenswrapper[5002]: I0930 12:37:52.477386 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/10afbe7c-91df-4843-96b2-ad180a2b9cd1-combined-ca-bundle\") pod \"neutron-784489f99d-nqwcr\" (UID: \"10afbe7c-91df-4843-96b2-ad180a2b9cd1\") " pod="openstack/neutron-784489f99d-nqwcr" Sep 30 12:37:52 crc kubenswrapper[5002]: I0930 12:37:52.491235 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8dwlw\" (UniqueName: \"kubernetes.io/projected/10afbe7c-91df-4843-96b2-ad180a2b9cd1-kube-api-access-8dwlw\") pod \"neutron-784489f99d-nqwcr\" (UID: \"10afbe7c-91df-4843-96b2-ad180a2b9cd1\") " pod="openstack/neutron-784489f99d-nqwcr" Sep 30 12:37:52 crc kubenswrapper[5002]: I0930 12:37:52.582409 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-784489f99d-nqwcr" Sep 30 12:37:52 crc kubenswrapper[5002]: E0930 12:37:52.722651 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"placement-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-placement-api:current-podified\\\"\"" pod="openstack/placement-db-sync-bnf58" podUID="4fdf0c77-68ae-41ff-b6b5-122baa461b8c" Sep 30 12:37:52 crc kubenswrapper[5002]: I0930 12:37:52.728365 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7f7eb2da-349a-49e9-9e42-7e61b36682f0" path="/var/lib/kubelet/pods/7f7eb2da-349a-49e9-9e42-7e61b36682f0/volumes" Sep 30 12:37:52 crc kubenswrapper[5002]: I0930 12:37:52.858212 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-75c8ddd69c-zfwrs"] Sep 30 12:37:52 crc kubenswrapper[5002]: I0930 12:37:52.969304 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-86cd8676db-mltgz" event={"ID":"beec5517-fd48-4017-9cb3-1102001e9439","Type":"ContainerStarted","Data":"2ce09c6165125531983dc65ca7e3bf84ffa6db66b1a0a6cdbe41ee301e6b6e0a"} Sep 30 12:37:52 crc kubenswrapper[5002]: I0930 12:37:52.969346 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-86cd8676db-mltgz" Sep 30 12:37:52 crc kubenswrapper[5002]: I0930 12:37:52.969356 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-86cd8676db-mltgz" event={"ID":"beec5517-fd48-4017-9cb3-1102001e9439","Type":"ContainerStarted","Data":"3ee31987cac31de7a53839e25d36aa1a3805641e725f6df37d71980d6c990de9"} Sep 30 12:37:52 crc kubenswrapper[5002]: I0930 12:37:52.969651 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-86cd8676db-mltgz" Sep 30 12:37:52 crc kubenswrapper[5002]: I0930 12:37:52.988011 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-86cd8676db-mltgz" podStartSLOduration=2.987989195 podStartE2EDuration="2.987989195s" podCreationTimestamp="2025-09-30 12:37:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 12:37:52.987341569 +0000 UTC m=+1047.237023725" watchObservedRunningTime="2025-09-30 12:37:52.987989195 +0000 UTC m=+1047.237671341" Sep 30 12:37:53 crc kubenswrapper[5002]: I0930 12:37:53.450169 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-59d5ff467f-kv9dn" Sep 30 12:37:53 crc kubenswrapper[5002]: I0930 12:37:53.525230 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/87fd4f9e-e9c3-4427-884e-44dc39055ab4-config\") pod \"87fd4f9e-e9c3-4427-884e-44dc39055ab4\" (UID: \"87fd4f9e-e9c3-4427-884e-44dc39055ab4\") " Sep 30 12:37:53 crc kubenswrapper[5002]: I0930 12:37:53.525348 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ffchm\" (UniqueName: \"kubernetes.io/projected/87fd4f9e-e9c3-4427-884e-44dc39055ab4-kube-api-access-ffchm\") pod \"87fd4f9e-e9c3-4427-884e-44dc39055ab4\" (UID: \"87fd4f9e-e9c3-4427-884e-44dc39055ab4\") " Sep 30 12:37:53 crc kubenswrapper[5002]: I0930 12:37:53.525395 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/87fd4f9e-e9c3-4427-884e-44dc39055ab4-ovsdbserver-nb\") pod \"87fd4f9e-e9c3-4427-884e-44dc39055ab4\" (UID: \"87fd4f9e-e9c3-4427-884e-44dc39055ab4\") " Sep 30 12:37:53 crc kubenswrapper[5002]: I0930 12:37:53.525449 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/87fd4f9e-e9c3-4427-884e-44dc39055ab4-ovsdbserver-sb\") pod \"87fd4f9e-e9c3-4427-884e-44dc39055ab4\" (UID: \"87fd4f9e-e9c3-4427-884e-44dc39055ab4\") " Sep 30 12:37:53 crc kubenswrapper[5002]: I0930 12:37:53.525521 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/87fd4f9e-e9c3-4427-884e-44dc39055ab4-dns-svc\") pod \"87fd4f9e-e9c3-4427-884e-44dc39055ab4\" (UID: \"87fd4f9e-e9c3-4427-884e-44dc39055ab4\") " Sep 30 12:37:53 crc kubenswrapper[5002]: I0930 12:37:53.525550 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/87fd4f9e-e9c3-4427-884e-44dc39055ab4-dns-swift-storage-0\") pod \"87fd4f9e-e9c3-4427-884e-44dc39055ab4\" (UID: \"87fd4f9e-e9c3-4427-884e-44dc39055ab4\") " Sep 30 12:37:53 crc kubenswrapper[5002]: I0930 12:37:53.531250 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/87fd4f9e-e9c3-4427-884e-44dc39055ab4-kube-api-access-ffchm" (OuterVolumeSpecName: "kube-api-access-ffchm") pod "87fd4f9e-e9c3-4427-884e-44dc39055ab4" (UID: "87fd4f9e-e9c3-4427-884e-44dc39055ab4"). InnerVolumeSpecName "kube-api-access-ffchm". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 12:37:53 crc kubenswrapper[5002]: I0930 12:37:53.591820 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/87fd4f9e-e9c3-4427-884e-44dc39055ab4-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "87fd4f9e-e9c3-4427-884e-44dc39055ab4" (UID: "87fd4f9e-e9c3-4427-884e-44dc39055ab4"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 12:37:53 crc kubenswrapper[5002]: I0930 12:37:53.598151 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/87fd4f9e-e9c3-4427-884e-44dc39055ab4-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "87fd4f9e-e9c3-4427-884e-44dc39055ab4" (UID: "87fd4f9e-e9c3-4427-884e-44dc39055ab4"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 12:37:53 crc kubenswrapper[5002]: I0930 12:37:53.601550 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/87fd4f9e-e9c3-4427-884e-44dc39055ab4-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "87fd4f9e-e9c3-4427-884e-44dc39055ab4" (UID: "87fd4f9e-e9c3-4427-884e-44dc39055ab4"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 12:37:53 crc kubenswrapper[5002]: I0930 12:37:53.617758 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/87fd4f9e-e9c3-4427-884e-44dc39055ab4-config" (OuterVolumeSpecName: "config") pod "87fd4f9e-e9c3-4427-884e-44dc39055ab4" (UID: "87fd4f9e-e9c3-4427-884e-44dc39055ab4"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 12:37:53 crc kubenswrapper[5002]: I0930 12:37:53.627357 5002 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/87fd4f9e-e9c3-4427-884e-44dc39055ab4-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Sep 30 12:37:53 crc kubenswrapper[5002]: I0930 12:37:53.627383 5002 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/87fd4f9e-e9c3-4427-884e-44dc39055ab4-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Sep 30 12:37:53 crc kubenswrapper[5002]: I0930 12:37:53.627393 5002 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/87fd4f9e-e9c3-4427-884e-44dc39055ab4-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 30 12:37:53 crc kubenswrapper[5002]: I0930 12:37:53.627403 5002 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/87fd4f9e-e9c3-4427-884e-44dc39055ab4-config\") on node \"crc\" DevicePath \"\"" Sep 30 12:37:53 crc kubenswrapper[5002]: I0930 12:37:53.627412 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ffchm\" (UniqueName: \"kubernetes.io/projected/87fd4f9e-e9c3-4427-884e-44dc39055ab4-kube-api-access-ffchm\") on node \"crc\" DevicePath \"\"" Sep 30 12:37:53 crc kubenswrapper[5002]: I0930 12:37:53.638021 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/87fd4f9e-e9c3-4427-884e-44dc39055ab4-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "87fd4f9e-e9c3-4427-884e-44dc39055ab4" (UID: "87fd4f9e-e9c3-4427-884e-44dc39055ab4"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 12:37:53 crc kubenswrapper[5002]: I0930 12:37:53.732612 5002 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/87fd4f9e-e9c3-4427-884e-44dc39055ab4-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Sep 30 12:37:53 crc kubenswrapper[5002]: I0930 12:37:53.880905 5002 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-f8777b74-fpwh2" podUID="fd4d1d88-c894-496a-b2ee-00bf80fa2415" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.144:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.144:8443: connect: connection refused" Sep 30 12:37:53 crc kubenswrapper[5002]: I0930 12:37:53.981706 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-59d5ff467f-kv9dn" Sep 30 12:37:53 crc kubenswrapper[5002]: I0930 12:37:53.981704 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-59d5ff467f-kv9dn" event={"ID":"87fd4f9e-e9c3-4427-884e-44dc39055ab4","Type":"ContainerDied","Data":"2b5d02346c239a5849fb819ef3820e81647dc04d6468f8593bddd651dc54d9bb"} Sep 30 12:37:53 crc kubenswrapper[5002]: I0930 12:37:53.982056 5002 scope.go:117] "RemoveContainer" containerID="bb38557ce6c2a3dde0cbb5ce531c3a5a07ede5241b7111e3b74c8ba77a757510" Sep 30 12:37:53 crc kubenswrapper[5002]: I0930 12:37:53.999244 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-75c8ddd69c-zfwrs" event={"ID":"accc1f38-16e0-4b04-98d4-ebece5b81989","Type":"ContainerStarted","Data":"fb45501af464b9efa21f58883a09a5a3b58fdc97a29ed6df43d4af6051754c69"} Sep 30 12:37:54 crc kubenswrapper[5002]: I0930 12:37:54.025701 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-proxy-85654c5dc5-xmznd"] Sep 30 12:37:54 crc kubenswrapper[5002]: E0930 12:37:54.044093 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="87fd4f9e-e9c3-4427-884e-44dc39055ab4" containerName="init" Sep 30 12:37:54 crc kubenswrapper[5002]: I0930 12:37:54.044366 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="87fd4f9e-e9c3-4427-884e-44dc39055ab4" containerName="init" Sep 30 12:37:54 crc kubenswrapper[5002]: I0930 12:37:54.047570 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="87fd4f9e-e9c3-4427-884e-44dc39055ab4" containerName="init" Sep 30 12:37:54 crc kubenswrapper[5002]: I0930 12:37:54.052337 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-proxy-85654c5dc5-xmznd" Sep 30 12:37:54 crc kubenswrapper[5002]: I0930 12:37:54.063653 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-swift-internal-svc" Sep 30 12:37:54 crc kubenswrapper[5002]: I0930 12:37:54.063854 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-proxy-config-data" Sep 30 12:37:54 crc kubenswrapper[5002]: I0930 12:37:54.068588 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-swift-public-svc" Sep 30 12:37:54 crc kubenswrapper[5002]: I0930 12:37:54.070956 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-proxy-85654c5dc5-xmznd"] Sep 30 12:37:54 crc kubenswrapper[5002]: I0930 12:37:54.118494 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-59d5ff467f-kv9dn"] Sep 30 12:37:54 crc kubenswrapper[5002]: I0930 12:37:54.130821 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-59d5ff467f-kv9dn"] Sep 30 12:37:54 crc kubenswrapper[5002]: I0930 12:37:54.139509 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/c487a894-05f0-4ed3-9b0a-fc5bfbae3f74-etc-swift\") pod \"swift-proxy-85654c5dc5-xmznd\" (UID: \"c487a894-05f0-4ed3-9b0a-fc5bfbae3f74\") " pod="openstack/swift-proxy-85654c5dc5-xmznd" Sep 30 12:37:54 crc kubenswrapper[5002]: I0930 12:37:54.139647 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c487a894-05f0-4ed3-9b0a-fc5bfbae3f74-run-httpd\") pod \"swift-proxy-85654c5dc5-xmznd\" (UID: \"c487a894-05f0-4ed3-9b0a-fc5bfbae3f74\") " 
pod="openstack/swift-proxy-85654c5dc5-xmznd" Sep 30 12:37:54 crc kubenswrapper[5002]: I0930 12:37:54.139672 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c487a894-05f0-4ed3-9b0a-fc5bfbae3f74-log-httpd\") pod \"swift-proxy-85654c5dc5-xmznd\" (UID: \"c487a894-05f0-4ed3-9b0a-fc5bfbae3f74\") " pod="openstack/swift-proxy-85654c5dc5-xmznd" Sep 30 12:37:54 crc kubenswrapper[5002]: I0930 12:37:54.139703 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/c487a894-05f0-4ed3-9b0a-fc5bfbae3f74-internal-tls-certs\") pod \"swift-proxy-85654c5dc5-xmznd\" (UID: \"c487a894-05f0-4ed3-9b0a-fc5bfbae3f74\") " pod="openstack/swift-proxy-85654c5dc5-xmznd" Sep 30 12:37:54 crc kubenswrapper[5002]: I0930 12:37:54.139762 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-84f2n\" (UniqueName: \"kubernetes.io/projected/c487a894-05f0-4ed3-9b0a-fc5bfbae3f74-kube-api-access-84f2n\") pod \"swift-proxy-85654c5dc5-xmznd\" (UID: \"c487a894-05f0-4ed3-9b0a-fc5bfbae3f74\") " pod="openstack/swift-proxy-85654c5dc5-xmznd" Sep 30 12:37:54 crc kubenswrapper[5002]: I0930 12:37:54.139814 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c487a894-05f0-4ed3-9b0a-fc5bfbae3f74-config-data\") pod \"swift-proxy-85654c5dc5-xmznd\" (UID: \"c487a894-05f0-4ed3-9b0a-fc5bfbae3f74\") " pod="openstack/swift-proxy-85654c5dc5-xmznd" Sep 30 12:37:54 crc kubenswrapper[5002]: I0930 12:37:54.139852 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c487a894-05f0-4ed3-9b0a-fc5bfbae3f74-combined-ca-bundle\") pod \"swift-proxy-85654c5dc5-xmznd\" (UID: \"c487a894-05f0-4ed3-9b0a-fc5bfbae3f74\") " pod="openstack/swift-proxy-85654c5dc5-xmznd" Sep 30 12:37:54 crc kubenswrapper[5002]: I0930 12:37:54.139891 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/c487a894-05f0-4ed3-9b0a-fc5bfbae3f74-public-tls-certs\") pod \"swift-proxy-85654c5dc5-xmznd\" (UID: \"c487a894-05f0-4ed3-9b0a-fc5bfbae3f74\") " pod="openstack/swift-proxy-85654c5dc5-xmznd" Sep 30 12:37:54 crc kubenswrapper[5002]: I0930 12:37:54.241440 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c487a894-05f0-4ed3-9b0a-fc5bfbae3f74-log-httpd\") pod \"swift-proxy-85654c5dc5-xmznd\" (UID: \"c487a894-05f0-4ed3-9b0a-fc5bfbae3f74\") " pod="openstack/swift-proxy-85654c5dc5-xmznd" Sep 30 12:37:54 crc kubenswrapper[5002]: I0930 12:37:54.241811 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c487a894-05f0-4ed3-9b0a-fc5bfbae3f74-run-httpd\") pod \"swift-proxy-85654c5dc5-xmznd\" (UID: \"c487a894-05f0-4ed3-9b0a-fc5bfbae3f74\") " pod="openstack/swift-proxy-85654c5dc5-xmznd" Sep 30 12:37:54 crc kubenswrapper[5002]: I0930 12:37:54.241872 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/c487a894-05f0-4ed3-9b0a-fc5bfbae3f74-internal-tls-certs\") pod 
\"swift-proxy-85654c5dc5-xmznd\" (UID: \"c487a894-05f0-4ed3-9b0a-fc5bfbae3f74\") " pod="openstack/swift-proxy-85654c5dc5-xmznd" Sep 30 12:37:54 crc kubenswrapper[5002]: I0930 12:37:54.241945 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-84f2n\" (UniqueName: \"kubernetes.io/projected/c487a894-05f0-4ed3-9b0a-fc5bfbae3f74-kube-api-access-84f2n\") pod \"swift-proxy-85654c5dc5-xmznd\" (UID: \"c487a894-05f0-4ed3-9b0a-fc5bfbae3f74\") " pod="openstack/swift-proxy-85654c5dc5-xmznd" Sep 30 12:37:54 crc kubenswrapper[5002]: I0930 12:37:54.241967 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c487a894-05f0-4ed3-9b0a-fc5bfbae3f74-log-httpd\") pod \"swift-proxy-85654c5dc5-xmznd\" (UID: \"c487a894-05f0-4ed3-9b0a-fc5bfbae3f74\") " pod="openstack/swift-proxy-85654c5dc5-xmznd" Sep 30 12:37:54 crc kubenswrapper[5002]: I0930 12:37:54.242051 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c487a894-05f0-4ed3-9b0a-fc5bfbae3f74-config-data\") pod \"swift-proxy-85654c5dc5-xmznd\" (UID: \"c487a894-05f0-4ed3-9b0a-fc5bfbae3f74\") " pod="openstack/swift-proxy-85654c5dc5-xmznd" Sep 30 12:37:54 crc kubenswrapper[5002]: I0930 12:37:54.242112 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c487a894-05f0-4ed3-9b0a-fc5bfbae3f74-combined-ca-bundle\") pod \"swift-proxy-85654c5dc5-xmznd\" (UID: \"c487a894-05f0-4ed3-9b0a-fc5bfbae3f74\") " pod="openstack/swift-proxy-85654c5dc5-xmznd" Sep 30 12:37:54 crc kubenswrapper[5002]: I0930 12:37:54.242142 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/c487a894-05f0-4ed3-9b0a-fc5bfbae3f74-public-tls-certs\") pod \"swift-proxy-85654c5dc5-xmznd\" (UID: \"c487a894-05f0-4ed3-9b0a-fc5bfbae3f74\") " pod="openstack/swift-proxy-85654c5dc5-xmznd" Sep 30 12:37:54 crc kubenswrapper[5002]: I0930 12:37:54.242234 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/c487a894-05f0-4ed3-9b0a-fc5bfbae3f74-etc-swift\") pod \"swift-proxy-85654c5dc5-xmznd\" (UID: \"c487a894-05f0-4ed3-9b0a-fc5bfbae3f74\") " pod="openstack/swift-proxy-85654c5dc5-xmznd" Sep 30 12:37:54 crc kubenswrapper[5002]: I0930 12:37:54.242531 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c487a894-05f0-4ed3-9b0a-fc5bfbae3f74-run-httpd\") pod \"swift-proxy-85654c5dc5-xmznd\" (UID: \"c487a894-05f0-4ed3-9b0a-fc5bfbae3f74\") " pod="openstack/swift-proxy-85654c5dc5-xmznd" Sep 30 12:37:54 crc kubenswrapper[5002]: I0930 12:37:54.248781 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/c487a894-05f0-4ed3-9b0a-fc5bfbae3f74-etc-swift\") pod \"swift-proxy-85654c5dc5-xmznd\" (UID: \"c487a894-05f0-4ed3-9b0a-fc5bfbae3f74\") " pod="openstack/swift-proxy-85654c5dc5-xmznd" Sep 30 12:37:54 crc kubenswrapper[5002]: I0930 12:37:54.251362 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c487a894-05f0-4ed3-9b0a-fc5bfbae3f74-combined-ca-bundle\") pod \"swift-proxy-85654c5dc5-xmznd\" (UID: \"c487a894-05f0-4ed3-9b0a-fc5bfbae3f74\") " 
pod="openstack/swift-proxy-85654c5dc5-xmznd" Sep 30 12:37:54 crc kubenswrapper[5002]: I0930 12:37:54.251411 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/c487a894-05f0-4ed3-9b0a-fc5bfbae3f74-public-tls-certs\") pod \"swift-proxy-85654c5dc5-xmznd\" (UID: \"c487a894-05f0-4ed3-9b0a-fc5bfbae3f74\") " pod="openstack/swift-proxy-85654c5dc5-xmznd" Sep 30 12:37:54 crc kubenswrapper[5002]: I0930 12:37:54.251504 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c487a894-05f0-4ed3-9b0a-fc5bfbae3f74-config-data\") pod \"swift-proxy-85654c5dc5-xmznd\" (UID: \"c487a894-05f0-4ed3-9b0a-fc5bfbae3f74\") " pod="openstack/swift-proxy-85654c5dc5-xmznd" Sep 30 12:37:54 crc kubenswrapper[5002]: I0930 12:37:54.256549 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/c487a894-05f0-4ed3-9b0a-fc5bfbae3f74-internal-tls-certs\") pod \"swift-proxy-85654c5dc5-xmznd\" (UID: \"c487a894-05f0-4ed3-9b0a-fc5bfbae3f74\") " pod="openstack/swift-proxy-85654c5dc5-xmznd" Sep 30 12:37:54 crc kubenswrapper[5002]: I0930 12:37:54.259491 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-84f2n\" (UniqueName: \"kubernetes.io/projected/c487a894-05f0-4ed3-9b0a-fc5bfbae3f74-kube-api-access-84f2n\") pod \"swift-proxy-85654c5dc5-xmznd\" (UID: \"c487a894-05f0-4ed3-9b0a-fc5bfbae3f74\") " pod="openstack/swift-proxy-85654c5dc5-xmznd" Sep 30 12:37:54 crc kubenswrapper[5002]: I0930 12:37:54.441245 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-proxy-85654c5dc5-xmznd" Sep 30 12:37:54 crc kubenswrapper[5002]: I0930 12:37:54.695333 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="87fd4f9e-e9c3-4427-884e-44dc39055ab4" path="/var/lib/kubelet/pods/87fd4f9e-e9c3-4427-884e-44dc39055ab4/volumes" Sep 30 12:37:55 crc kubenswrapper[5002]: I0930 12:37:55.024895 5002 generic.go:334] "Generic (PLEG): container finished" podID="accc1f38-16e0-4b04-98d4-ebece5b81989" containerID="d249b247b3be252d97cbcd924a57ebbd24c8000d9a6076ed1301b73b667f7218" exitCode=0 Sep 30 12:37:55 crc kubenswrapper[5002]: I0930 12:37:55.024949 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-75c8ddd69c-zfwrs" event={"ID":"accc1f38-16e0-4b04-98d4-ebece5b81989","Type":"ContainerDied","Data":"d249b247b3be252d97cbcd924a57ebbd24c8000d9a6076ed1301b73b667f7218"} Sep 30 12:37:55 crc kubenswrapper[5002]: I0930 12:37:55.034362 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-64bf5676fd-f8zrr" event={"ID":"fc432228-1ae9-4a76-a81f-a8a7d2d44492","Type":"ContainerStarted","Data":"a1547ea7936c9f4a75686ab6ef4a17df6dcdf663523129c2dae12d7fcd2c4b7d"} Sep 30 12:37:55 crc kubenswrapper[5002]: I0930 12:37:55.034415 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-64bf5676fd-f8zrr" event={"ID":"fc432228-1ae9-4a76-a81f-a8a7d2d44492","Type":"ContainerStarted","Data":"0b696276caf22e3437c78c4440f3308ba1798feeacc06df5c9c0fa66c2e9cfa5"} Sep 30 12:37:55 crc kubenswrapper[5002]: I0930 12:37:55.049953 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-5976c988bc-j7gh5" 
event={"ID":"295327d8-7973-4826-a8ab-34dcf2f4b5d5","Type":"ContainerStarted","Data":"925d1746e9cd22ae39b5947f9db50cd5657c3da0a50e7aeb08704fcf4f1f4405"} Sep 30 12:37:55 crc kubenswrapper[5002]: I0930 12:37:55.049994 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-5976c988bc-j7gh5" event={"ID":"295327d8-7973-4826-a8ab-34dcf2f4b5d5","Type":"ContainerStarted","Data":"5696f2eff2bae692a9477b134b0a7c66c96bb134a31e6e3b5f4c291215228f82"} Sep 30 12:37:55 crc kubenswrapper[5002]: I0930 12:37:55.070688 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-keystone-listener-64bf5676fd-f8zrr" podStartSLOduration=1.941553212 podStartE2EDuration="5.070661694s" podCreationTimestamp="2025-09-30 12:37:50 +0000 UTC" firstStartedPulling="2025-09-30 12:37:51.089190661 +0000 UTC m=+1045.338872807" lastFinishedPulling="2025-09-30 12:37:54.218299143 +0000 UTC m=+1048.467981289" observedRunningTime="2025-09-30 12:37:55.064197299 +0000 UTC m=+1049.313879465" watchObservedRunningTime="2025-09-30 12:37:55.070661694 +0000 UTC m=+1049.320343840" Sep 30 12:37:55 crc kubenswrapper[5002]: I0930 12:37:55.098903 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-worker-5976c988bc-j7gh5" podStartSLOduration=2.163310611 podStartE2EDuration="5.098861192s" podCreationTimestamp="2025-09-30 12:37:50 +0000 UTC" firstStartedPulling="2025-09-30 12:37:51.291484274 +0000 UTC m=+1045.541166420" lastFinishedPulling="2025-09-30 12:37:54.227034855 +0000 UTC m=+1048.476717001" observedRunningTime="2025-09-30 12:37:55.079034207 +0000 UTC m=+1049.328716363" watchObservedRunningTime="2025-09-30 12:37:55.098861192 +0000 UTC m=+1049.348543348" Sep 30 12:37:55 crc kubenswrapper[5002]: I0930 12:37:55.289319 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-784489f99d-nqwcr"] Sep 30 12:37:55 crc kubenswrapper[5002]: W0930 12:37:55.300432 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod10afbe7c_91df_4843_96b2_ad180a2b9cd1.slice/crio-87f876174b9cfab3af426047fd90a521beea812e7ce8865cf310fecf7e2baaea WatchSource:0}: Error finding container 87f876174b9cfab3af426047fd90a521beea812e7ce8865cf310fecf7e2baaea: Status 404 returned error can't find the container with id 87f876174b9cfab3af426047fd90a521beea812e7ce8865cf310fecf7e2baaea Sep 30 12:37:55 crc kubenswrapper[5002]: I0930 12:37:55.905245 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-5c946cd5ff-n7x9t"] Sep 30 12:37:55 crc kubenswrapper[5002]: I0930 12:37:55.916984 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-5c946cd5ff-n7x9t" Sep 30 12:37:55 crc kubenswrapper[5002]: I0930 12:37:55.920610 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-public-svc" Sep 30 12:37:55 crc kubenswrapper[5002]: I0930 12:37:55.921665 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-internal-svc" Sep 30 12:37:55 crc kubenswrapper[5002]: I0930 12:37:55.958566 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-proxy-85654c5dc5-xmznd"] Sep 30 12:37:55 crc kubenswrapper[5002]: I0930 12:37:55.966109 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-5c946cd5ff-n7x9t"] Sep 30 12:37:56 crc kubenswrapper[5002]: I0930 12:37:56.017062 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1df12b89-f778-45b2-b39a-c95700262b6e-combined-ca-bundle\") pod \"neutron-5c946cd5ff-n7x9t\" (UID: \"1df12b89-f778-45b2-b39a-c95700262b6e\") " pod="openstack/neutron-5c946cd5ff-n7x9t" Sep 30 12:37:56 crc kubenswrapper[5002]: I0930 12:37:56.017125 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/1df12b89-f778-45b2-b39a-c95700262b6e-httpd-config\") pod \"neutron-5c946cd5ff-n7x9t\" (UID: \"1df12b89-f778-45b2-b39a-c95700262b6e\") " pod="openstack/neutron-5c946cd5ff-n7x9t" Sep 30 12:37:56 crc kubenswrapper[5002]: I0930 12:37:56.017191 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/1df12b89-f778-45b2-b39a-c95700262b6e-public-tls-certs\") pod \"neutron-5c946cd5ff-n7x9t\" (UID: \"1df12b89-f778-45b2-b39a-c95700262b6e\") " pod="openstack/neutron-5c946cd5ff-n7x9t" Sep 30 12:37:56 crc kubenswrapper[5002]: I0930 12:37:56.017273 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/1df12b89-f778-45b2-b39a-c95700262b6e-internal-tls-certs\") pod \"neutron-5c946cd5ff-n7x9t\" (UID: \"1df12b89-f778-45b2-b39a-c95700262b6e\") " pod="openstack/neutron-5c946cd5ff-n7x9t" Sep 30 12:37:56 crc kubenswrapper[5002]: I0930 12:37:56.017311 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/1df12b89-f778-45b2-b39a-c95700262b6e-config\") pod \"neutron-5c946cd5ff-n7x9t\" (UID: \"1df12b89-f778-45b2-b39a-c95700262b6e\") " pod="openstack/neutron-5c946cd5ff-n7x9t" Sep 30 12:37:56 crc kubenswrapper[5002]: I0930 12:37:56.017357 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4pwkh\" (UniqueName: \"kubernetes.io/projected/1df12b89-f778-45b2-b39a-c95700262b6e-kube-api-access-4pwkh\") pod \"neutron-5c946cd5ff-n7x9t\" (UID: \"1df12b89-f778-45b2-b39a-c95700262b6e\") " pod="openstack/neutron-5c946cd5ff-n7x9t" Sep 30 12:37:56 crc kubenswrapper[5002]: I0930 12:37:56.017398 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/1df12b89-f778-45b2-b39a-c95700262b6e-ovndb-tls-certs\") pod \"neutron-5c946cd5ff-n7x9t\" (UID: \"1df12b89-f778-45b2-b39a-c95700262b6e\") " pod="openstack/neutron-5c946cd5ff-n7x9t" Sep 30 12:37:56 crc 
kubenswrapper[5002]: I0930 12:37:56.073051 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-vxpxc" event={"ID":"6c398da4-8e97-4ee7-83bb-f958c41fabff","Type":"ContainerStarted","Data":"a99c08c483c48a24fd03aaf010f4c106d01c65729b9e0bebd44143567528ada9"} Sep 30 12:37:56 crc kubenswrapper[5002]: I0930 12:37:56.078461 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-784489f99d-nqwcr" event={"ID":"10afbe7c-91df-4843-96b2-ad180a2b9cd1","Type":"ContainerStarted","Data":"6d26ffb59640b1d3e6ca14e606cdfaa97dc7ecba65a631be48df0d924d0febc3"} Sep 30 12:37:56 crc kubenswrapper[5002]: I0930 12:37:56.078536 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-784489f99d-nqwcr" event={"ID":"10afbe7c-91df-4843-96b2-ad180a2b9cd1","Type":"ContainerStarted","Data":"f93a8c90672f5396b7f993dea6d68a3053142b40633703b7b6d3a573be0d8c22"} Sep 30 12:37:56 crc kubenswrapper[5002]: I0930 12:37:56.078551 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-784489f99d-nqwcr" event={"ID":"10afbe7c-91df-4843-96b2-ad180a2b9cd1","Type":"ContainerStarted","Data":"87f876174b9cfab3af426047fd90a521beea812e7ce8865cf310fecf7e2baaea"} Sep 30 12:37:56 crc kubenswrapper[5002]: I0930 12:37:56.078572 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-784489f99d-nqwcr" Sep 30 12:37:56 crc kubenswrapper[5002]: I0930 12:37:56.084966 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-75c8ddd69c-zfwrs" event={"ID":"accc1f38-16e0-4b04-98d4-ebece5b81989","Type":"ContainerStarted","Data":"06c4bf846bd93f27fcfc0e1370ee7441dddbd94ac2a871840b022bada23e98fa"} Sep 30 12:37:56 crc kubenswrapper[5002]: I0930 12:37:56.085917 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-75c8ddd69c-zfwrs" Sep 30 12:37:56 crc kubenswrapper[5002]: I0930 12:37:56.093632 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-85654c5dc5-xmznd" event={"ID":"c487a894-05f0-4ed3-9b0a-fc5bfbae3f74","Type":"ContainerStarted","Data":"28936c307d2e614b88b95b8af3bff9f404625742d988fa6ea7a0272e0f345815"} Sep 30 12:37:56 crc kubenswrapper[5002]: I0930 12:37:56.101890 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-db-sync-vxpxc" podStartSLOduration=5.969057284 podStartE2EDuration="44.101872519s" podCreationTimestamp="2025-09-30 12:37:12 +0000 UTC" firstStartedPulling="2025-09-30 12:37:16.094310803 +0000 UTC m=+1010.343992949" lastFinishedPulling="2025-09-30 12:37:54.227126038 +0000 UTC m=+1048.476808184" observedRunningTime="2025-09-30 12:37:56.088797187 +0000 UTC m=+1050.338479343" watchObservedRunningTime="2025-09-30 12:37:56.101872519 +0000 UTC m=+1050.351554665" Sep 30 12:37:56 crc kubenswrapper[5002]: I0930 12:37:56.110614 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-784489f99d-nqwcr" podStartSLOduration=4.110594002 podStartE2EDuration="4.110594002s" podCreationTimestamp="2025-09-30 12:37:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 12:37:56.108037307 +0000 UTC m=+1050.357719483" watchObservedRunningTime="2025-09-30 12:37:56.110594002 +0000 UTC m=+1050.360276148" Sep 30 12:37:56 crc kubenswrapper[5002]: I0930 12:37:56.119127 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/secret/1df12b89-f778-45b2-b39a-c95700262b6e-config\") pod \"neutron-5c946cd5ff-n7x9t\" (UID: \"1df12b89-f778-45b2-b39a-c95700262b6e\") " pod="openstack/neutron-5c946cd5ff-n7x9t" Sep 30 12:37:56 crc kubenswrapper[5002]: I0930 12:37:56.119746 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4pwkh\" (UniqueName: \"kubernetes.io/projected/1df12b89-f778-45b2-b39a-c95700262b6e-kube-api-access-4pwkh\") pod \"neutron-5c946cd5ff-n7x9t\" (UID: \"1df12b89-f778-45b2-b39a-c95700262b6e\") " pod="openstack/neutron-5c946cd5ff-n7x9t" Sep 30 12:37:56 crc kubenswrapper[5002]: I0930 12:37:56.119800 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/1df12b89-f778-45b2-b39a-c95700262b6e-ovndb-tls-certs\") pod \"neutron-5c946cd5ff-n7x9t\" (UID: \"1df12b89-f778-45b2-b39a-c95700262b6e\") " pod="openstack/neutron-5c946cd5ff-n7x9t" Sep 30 12:37:56 crc kubenswrapper[5002]: I0930 12:37:56.120191 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1df12b89-f778-45b2-b39a-c95700262b6e-combined-ca-bundle\") pod \"neutron-5c946cd5ff-n7x9t\" (UID: \"1df12b89-f778-45b2-b39a-c95700262b6e\") " pod="openstack/neutron-5c946cd5ff-n7x9t" Sep 30 12:37:56 crc kubenswrapper[5002]: I0930 12:37:56.120219 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/1df12b89-f778-45b2-b39a-c95700262b6e-httpd-config\") pod \"neutron-5c946cd5ff-n7x9t\" (UID: \"1df12b89-f778-45b2-b39a-c95700262b6e\") " pod="openstack/neutron-5c946cd5ff-n7x9t" Sep 30 12:37:56 crc kubenswrapper[5002]: I0930 12:37:56.120261 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/1df12b89-f778-45b2-b39a-c95700262b6e-public-tls-certs\") pod \"neutron-5c946cd5ff-n7x9t\" (UID: \"1df12b89-f778-45b2-b39a-c95700262b6e\") " pod="openstack/neutron-5c946cd5ff-n7x9t" Sep 30 12:37:56 crc kubenswrapper[5002]: I0930 12:37:56.120316 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/1df12b89-f778-45b2-b39a-c95700262b6e-internal-tls-certs\") pod \"neutron-5c946cd5ff-n7x9t\" (UID: \"1df12b89-f778-45b2-b39a-c95700262b6e\") " pod="openstack/neutron-5c946cd5ff-n7x9t" Sep 30 12:37:56 crc kubenswrapper[5002]: I0930 12:37:56.125007 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/1df12b89-f778-45b2-b39a-c95700262b6e-internal-tls-certs\") pod \"neutron-5c946cd5ff-n7x9t\" (UID: \"1df12b89-f778-45b2-b39a-c95700262b6e\") " pod="openstack/neutron-5c946cd5ff-n7x9t" Sep 30 12:37:56 crc kubenswrapper[5002]: I0930 12:37:56.125464 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/1df12b89-f778-45b2-b39a-c95700262b6e-config\") pod \"neutron-5c946cd5ff-n7x9t\" (UID: \"1df12b89-f778-45b2-b39a-c95700262b6e\") " pod="openstack/neutron-5c946cd5ff-n7x9t" Sep 30 12:37:56 crc kubenswrapper[5002]: I0930 12:37:56.144076 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/1df12b89-f778-45b2-b39a-c95700262b6e-ovndb-tls-certs\") pod \"neutron-5c946cd5ff-n7x9t\" (UID: 
\"1df12b89-f778-45b2-b39a-c95700262b6e\") " pod="openstack/neutron-5c946cd5ff-n7x9t" Sep 30 12:37:56 crc kubenswrapper[5002]: I0930 12:37:56.150361 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4pwkh\" (UniqueName: \"kubernetes.io/projected/1df12b89-f778-45b2-b39a-c95700262b6e-kube-api-access-4pwkh\") pod \"neutron-5c946cd5ff-n7x9t\" (UID: \"1df12b89-f778-45b2-b39a-c95700262b6e\") " pod="openstack/neutron-5c946cd5ff-n7x9t" Sep 30 12:37:56 crc kubenswrapper[5002]: I0930 12:37:56.159402 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/1df12b89-f778-45b2-b39a-c95700262b6e-httpd-config\") pod \"neutron-5c946cd5ff-n7x9t\" (UID: \"1df12b89-f778-45b2-b39a-c95700262b6e\") " pod="openstack/neutron-5c946cd5ff-n7x9t" Sep 30 12:37:56 crc kubenswrapper[5002]: I0930 12:37:56.160063 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/1df12b89-f778-45b2-b39a-c95700262b6e-public-tls-certs\") pod \"neutron-5c946cd5ff-n7x9t\" (UID: \"1df12b89-f778-45b2-b39a-c95700262b6e\") " pod="openstack/neutron-5c946cd5ff-n7x9t" Sep 30 12:37:56 crc kubenswrapper[5002]: I0930 12:37:56.160866 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1df12b89-f778-45b2-b39a-c95700262b6e-combined-ca-bundle\") pod \"neutron-5c946cd5ff-n7x9t\" (UID: \"1df12b89-f778-45b2-b39a-c95700262b6e\") " pod="openstack/neutron-5c946cd5ff-n7x9t" Sep 30 12:37:56 crc kubenswrapper[5002]: I0930 12:37:56.234568 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-5c946cd5ff-n7x9t" Sep 30 12:37:56 crc kubenswrapper[5002]: I0930 12:37:56.715295 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-75c8ddd69c-zfwrs" podStartSLOduration=4.715277553 podStartE2EDuration="4.715277553s" podCreationTimestamp="2025-09-30 12:37:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 12:37:56.147634965 +0000 UTC m=+1050.397317121" watchObservedRunningTime="2025-09-30 12:37:56.715277553 +0000 UTC m=+1050.964959699" Sep 30 12:37:56 crc kubenswrapper[5002]: I0930 12:37:56.808126 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Sep 30 12:37:56 crc kubenswrapper[5002]: I0930 12:37:56.808397 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="fa77db27-d4ee-44a1-8e63-008f66b34b48" containerName="ceilometer-central-agent" containerID="cri-o://55b233246ee53d4d5a3b8329279bf1dbc9efff7875abe28b2346f065546eb3e4" gracePeriod=30 Sep 30 12:37:56 crc kubenswrapper[5002]: I0930 12:37:56.808453 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="fa77db27-d4ee-44a1-8e63-008f66b34b48" containerName="sg-core" containerID="cri-o://f9379ff97f831462614e289dd46f9b8c895d82de5eac97e80ea92c221b895960" gracePeriod=30 Sep 30 12:37:56 crc kubenswrapper[5002]: I0930 12:37:56.808539 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="fa77db27-d4ee-44a1-8e63-008f66b34b48" containerName="proxy-httpd" containerID="cri-o://c36949a6b43b6c3f905e7f9919a3b739c026f04aa774295ff284f8de300b226b" gracePeriod=30 Sep 30 12:37:56 crc 
kubenswrapper[5002]: I0930 12:37:56.808479 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="fa77db27-d4ee-44a1-8e63-008f66b34b48" containerName="ceilometer-notification-agent" containerID="cri-o://04755585fe16d612a1b3517199059a4a28944e1c3c9252e203697dc993ad8e7b" gracePeriod=30
Sep 30 12:37:56 crc kubenswrapper[5002]: I0930 12:37:56.941415 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-5c946cd5ff-n7x9t"]
Sep 30 12:37:56 crc kubenswrapper[5002]: W0930 12:37:56.946760 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1df12b89_f778_45b2_b39a_c95700262b6e.slice/crio-fa8aa06fab38a0a3b2b20486f31232a4faf8f3c303e4a10531b455ead274c0aa WatchSource:0}: Error finding container fa8aa06fab38a0a3b2b20486f31232a4faf8f3c303e4a10531b455ead274c0aa: Status 404 returned error can't find the container with id fa8aa06fab38a0a3b2b20486f31232a4faf8f3c303e4a10531b455ead274c0aa
Sep 30 12:37:57 crc kubenswrapper[5002]: I0930 12:37:57.113436 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-5c946cd5ff-n7x9t" event={"ID":"1df12b89-f778-45b2-b39a-c95700262b6e","Type":"ContainerStarted","Data":"fa8aa06fab38a0a3b2b20486f31232a4faf8f3c303e4a10531b455ead274c0aa"}
Sep 30 12:37:57 crc kubenswrapper[5002]: I0930 12:37:57.119896 5002 generic.go:334] "Generic (PLEG): container finished" podID="fa77db27-d4ee-44a1-8e63-008f66b34b48" containerID="f9379ff97f831462614e289dd46f9b8c895d82de5eac97e80ea92c221b895960" exitCode=2
Sep 30 12:37:57 crc kubenswrapper[5002]: I0930 12:37:57.119972 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"fa77db27-d4ee-44a1-8e63-008f66b34b48","Type":"ContainerDied","Data":"f9379ff97f831462614e289dd46f9b8c895d82de5eac97e80ea92c221b895960"}
Sep 30 12:37:57 crc kubenswrapper[5002]: I0930 12:37:57.133584 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-85654c5dc5-xmznd" event={"ID":"c487a894-05f0-4ed3-9b0a-fc5bfbae3f74","Type":"ContainerStarted","Data":"24efbf9756dfa0861ab09396f49998a7796bc686df5ada91a4a08cb77e8106f7"}
Sep 30 12:37:57 crc kubenswrapper[5002]: I0930 12:37:57.133663 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-85654c5dc5-xmznd" event={"ID":"c487a894-05f0-4ed3-9b0a-fc5bfbae3f74","Type":"ContainerStarted","Data":"0b8be0e8401d09d80524b005fc737c05e89940a8f0a5cbc78b335b9148678062"}
Sep 30 12:37:57 crc kubenswrapper[5002]: I0930 12:37:57.133680 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/swift-proxy-85654c5dc5-xmznd"
Sep 30 12:37:57 crc kubenswrapper[5002]: I0930 12:37:57.133693 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/swift-proxy-85654c5dc5-xmznd"
Sep 30 12:37:57 crc kubenswrapper[5002]: I0930 12:37:57.169097 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-proxy-85654c5dc5-xmznd" podStartSLOduration=4.169075073 podStartE2EDuration="4.169075073s" podCreationTimestamp="2025-09-30 12:37:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 12:37:57.16425213 +0000 UTC m=+1051.413934286" watchObservedRunningTime="2025-09-30 12:37:57.169075073 +0000 UTC m=+1051.418757229"
Sep 30 12:37:57 crc kubenswrapper[5002]: I0930 12:37:57.199159 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-858c4cb9d6-g5ts6"]
Sep 30 12:37:57 crc kubenswrapper[5002]: I0930 12:37:57.201138 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-858c4cb9d6-g5ts6"
Sep 30 12:37:57 crc kubenswrapper[5002]: I0930 12:37:57.204790 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-barbican-internal-svc"
Sep 30 12:37:57 crc kubenswrapper[5002]: I0930 12:37:57.205450 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-barbican-public-svc"
Sep 30 12:37:57 crc kubenswrapper[5002]: I0930 12:37:57.293177 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-858c4cb9d6-g5ts6"]
Sep 30 12:37:57 crc kubenswrapper[5002]: I0930 12:37:57.343857 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ed88e034-9e24-4611-9d19-90530ff3f7b1-public-tls-certs\") pod \"barbican-api-858c4cb9d6-g5ts6\" (UID: \"ed88e034-9e24-4611-9d19-90530ff3f7b1\") " pod="openstack/barbican-api-858c4cb9d6-g5ts6"
Sep 30 12:37:57 crc kubenswrapper[5002]: I0930 12:37:57.343903 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ed88e034-9e24-4611-9d19-90530ff3f7b1-combined-ca-bundle\") pod \"barbican-api-858c4cb9d6-g5ts6\" (UID: \"ed88e034-9e24-4611-9d19-90530ff3f7b1\") " pod="openstack/barbican-api-858c4cb9d6-g5ts6"
Sep 30 12:37:57 crc kubenswrapper[5002]: I0930 12:37:57.343954 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ndcvn\" (UniqueName: \"kubernetes.io/projected/ed88e034-9e24-4611-9d19-90530ff3f7b1-kube-api-access-ndcvn\") pod \"barbican-api-858c4cb9d6-g5ts6\" (UID: \"ed88e034-9e24-4611-9d19-90530ff3f7b1\") " pod="openstack/barbican-api-858c4cb9d6-g5ts6"
Sep 30 12:37:57 crc kubenswrapper[5002]: I0930 12:37:57.344032 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ed88e034-9e24-4611-9d19-90530ff3f7b1-logs\") pod \"barbican-api-858c4cb9d6-g5ts6\" (UID: \"ed88e034-9e24-4611-9d19-90530ff3f7b1\") " pod="openstack/barbican-api-858c4cb9d6-g5ts6"
Sep 30 12:37:57 crc kubenswrapper[5002]: I0930 12:37:57.344099 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ed88e034-9e24-4611-9d19-90530ff3f7b1-config-data\") pod \"barbican-api-858c4cb9d6-g5ts6\" (UID: \"ed88e034-9e24-4611-9d19-90530ff3f7b1\") " pod="openstack/barbican-api-858c4cb9d6-g5ts6"
Sep 30 12:37:57 crc kubenswrapper[5002]: I0930 12:37:57.344128 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ed88e034-9e24-4611-9d19-90530ff3f7b1-internal-tls-certs\") pod \"barbican-api-858c4cb9d6-g5ts6\" (UID: \"ed88e034-9e24-4611-9d19-90530ff3f7b1\") " pod="openstack/barbican-api-858c4cb9d6-g5ts6"
Sep 30 12:37:57 crc kubenswrapper[5002]: I0930 12:37:57.344241 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ed88e034-9e24-4611-9d19-90530ff3f7b1-config-data-custom\") pod \"barbican-api-858c4cb9d6-g5ts6\" (UID: \"ed88e034-9e24-4611-9d19-90530ff3f7b1\") " pod="openstack/barbican-api-858c4cb9d6-g5ts6"
Sep 30 12:37:57 crc kubenswrapper[5002]: I0930 12:37:57.445624 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ed88e034-9e24-4611-9d19-90530ff3f7b1-config-data-custom\") pod \"barbican-api-858c4cb9d6-g5ts6\" (UID: \"ed88e034-9e24-4611-9d19-90530ff3f7b1\") " pod="openstack/barbican-api-858c4cb9d6-g5ts6"
Sep 30 12:37:57 crc kubenswrapper[5002]: I0930 12:37:57.445710 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ed88e034-9e24-4611-9d19-90530ff3f7b1-public-tls-certs\") pod \"barbican-api-858c4cb9d6-g5ts6\" (UID: \"ed88e034-9e24-4611-9d19-90530ff3f7b1\") " pod="openstack/barbican-api-858c4cb9d6-g5ts6"
Sep 30 12:37:57 crc kubenswrapper[5002]: I0930 12:37:57.445749 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ed88e034-9e24-4611-9d19-90530ff3f7b1-combined-ca-bundle\") pod \"barbican-api-858c4cb9d6-g5ts6\" (UID: \"ed88e034-9e24-4611-9d19-90530ff3f7b1\") " pod="openstack/barbican-api-858c4cb9d6-g5ts6"
Sep 30 12:37:57 crc kubenswrapper[5002]: I0930 12:37:57.445769 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ndcvn\" (UniqueName: \"kubernetes.io/projected/ed88e034-9e24-4611-9d19-90530ff3f7b1-kube-api-access-ndcvn\") pod \"barbican-api-858c4cb9d6-g5ts6\" (UID: \"ed88e034-9e24-4611-9d19-90530ff3f7b1\") " pod="openstack/barbican-api-858c4cb9d6-g5ts6"
Sep 30 12:37:57 crc kubenswrapper[5002]: I0930 12:37:57.445830 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ed88e034-9e24-4611-9d19-90530ff3f7b1-logs\") pod \"barbican-api-858c4cb9d6-g5ts6\" (UID: \"ed88e034-9e24-4611-9d19-90530ff3f7b1\") " pod="openstack/barbican-api-858c4cb9d6-g5ts6"
Sep 30 12:37:57 crc kubenswrapper[5002]: I0930 12:37:57.445870 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ed88e034-9e24-4611-9d19-90530ff3f7b1-config-data\") pod \"barbican-api-858c4cb9d6-g5ts6\" (UID: \"ed88e034-9e24-4611-9d19-90530ff3f7b1\") " pod="openstack/barbican-api-858c4cb9d6-g5ts6"
Sep 30 12:37:57 crc kubenswrapper[5002]: I0930 12:37:57.445904 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ed88e034-9e24-4611-9d19-90530ff3f7b1-internal-tls-certs\") pod \"barbican-api-858c4cb9d6-g5ts6\" (UID: \"ed88e034-9e24-4611-9d19-90530ff3f7b1\") " pod="openstack/barbican-api-858c4cb9d6-g5ts6"
Sep 30 12:37:57 crc kubenswrapper[5002]: I0930 12:37:57.446977 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ed88e034-9e24-4611-9d19-90530ff3f7b1-logs\") pod \"barbican-api-858c4cb9d6-g5ts6\" (UID: \"ed88e034-9e24-4611-9d19-90530ff3f7b1\") " pod="openstack/barbican-api-858c4cb9d6-g5ts6"
Sep 30 12:37:57 crc kubenswrapper[5002]: I0930 12:37:57.461743 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ed88e034-9e24-4611-9d19-90530ff3f7b1-public-tls-certs\") pod \"barbican-api-858c4cb9d6-g5ts6\" (UID: \"ed88e034-9e24-4611-9d19-90530ff3f7b1\") " pod="openstack/barbican-api-858c4cb9d6-g5ts6"
Sep 30 12:37:57 crc kubenswrapper[5002]: I0930 12:37:57.461930 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ed88e034-9e24-4611-9d19-90530ff3f7b1-config-data-custom\") pod \"barbican-api-858c4cb9d6-g5ts6\" (UID: \"ed88e034-9e24-4611-9d19-90530ff3f7b1\") " pod="openstack/barbican-api-858c4cb9d6-g5ts6"
Sep 30 12:37:57 crc kubenswrapper[5002]: I0930 12:37:57.461945 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ed88e034-9e24-4611-9d19-90530ff3f7b1-combined-ca-bundle\") pod \"barbican-api-858c4cb9d6-g5ts6\" (UID: \"ed88e034-9e24-4611-9d19-90530ff3f7b1\") " pod="openstack/barbican-api-858c4cb9d6-g5ts6"
Sep 30 12:37:57 crc kubenswrapper[5002]: I0930 12:37:57.463814 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ed88e034-9e24-4611-9d19-90530ff3f7b1-internal-tls-certs\") pod \"barbican-api-858c4cb9d6-g5ts6\" (UID: \"ed88e034-9e24-4611-9d19-90530ff3f7b1\") " pod="openstack/barbican-api-858c4cb9d6-g5ts6"
Sep 30 12:37:57 crc kubenswrapper[5002]: I0930 12:37:57.467689 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ed88e034-9e24-4611-9d19-90530ff3f7b1-config-data\") pod \"barbican-api-858c4cb9d6-g5ts6\" (UID: \"ed88e034-9e24-4611-9d19-90530ff3f7b1\") " pod="openstack/barbican-api-858c4cb9d6-g5ts6"
Sep 30 12:37:57 crc kubenswrapper[5002]: I0930 12:37:57.469557 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ndcvn\" (UniqueName: \"kubernetes.io/projected/ed88e034-9e24-4611-9d19-90530ff3f7b1-kube-api-access-ndcvn\") pod \"barbican-api-858c4cb9d6-g5ts6\" (UID: \"ed88e034-9e24-4611-9d19-90530ff3f7b1\") " pod="openstack/barbican-api-858c4cb9d6-g5ts6"
Sep 30 12:37:57 crc kubenswrapper[5002]: I0930 12:37:57.538313 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-858c4cb9d6-g5ts6"
Sep 30 12:37:57 crc kubenswrapper[5002]: I0930 12:37:57.901001 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Sep 30 12:37:57 crc kubenswrapper[5002]: I0930 12:37:57.961726 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fa77db27-d4ee-44a1-8e63-008f66b34b48-scripts\") pod \"fa77db27-d4ee-44a1-8e63-008f66b34b48\" (UID: \"fa77db27-d4ee-44a1-8e63-008f66b34b48\") "
Sep 30 12:37:57 crc kubenswrapper[5002]: I0930 12:37:57.961852 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fa77db27-d4ee-44a1-8e63-008f66b34b48-config-data\") pod \"fa77db27-d4ee-44a1-8e63-008f66b34b48\" (UID: \"fa77db27-d4ee-44a1-8e63-008f66b34b48\") "
Sep 30 12:37:57 crc kubenswrapper[5002]: I0930 12:37:57.961877 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s8tj5\" (UniqueName: \"kubernetes.io/projected/fa77db27-d4ee-44a1-8e63-008f66b34b48-kube-api-access-s8tj5\") pod \"fa77db27-d4ee-44a1-8e63-008f66b34b48\" (UID: \"fa77db27-d4ee-44a1-8e63-008f66b34b48\") "
Sep 30 12:37:57 crc kubenswrapper[5002]: I0930 12:37:57.961898 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fa77db27-d4ee-44a1-8e63-008f66b34b48-combined-ca-bundle\") pod \"fa77db27-d4ee-44a1-8e63-008f66b34b48\" (UID: \"fa77db27-d4ee-44a1-8e63-008f66b34b48\") "
Sep 30 12:37:57 crc kubenswrapper[5002]: I0930 12:37:57.961950 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/fa77db27-d4ee-44a1-8e63-008f66b34b48-sg-core-conf-yaml\") pod \"fa77db27-d4ee-44a1-8e63-008f66b34b48\" (UID: \"fa77db27-d4ee-44a1-8e63-008f66b34b48\") "
Sep 30 12:37:57 crc kubenswrapper[5002]: I0930 12:37:57.961993 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fa77db27-d4ee-44a1-8e63-008f66b34b48-log-httpd\") pod \"fa77db27-d4ee-44a1-8e63-008f66b34b48\" (UID: \"fa77db27-d4ee-44a1-8e63-008f66b34b48\") "
Sep 30 12:37:57 crc kubenswrapper[5002]: I0930 12:37:57.962080 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fa77db27-d4ee-44a1-8e63-008f66b34b48-run-httpd\") pod \"fa77db27-d4ee-44a1-8e63-008f66b34b48\" (UID: \"fa77db27-d4ee-44a1-8e63-008f66b34b48\") "
Sep 30 12:37:57 crc kubenswrapper[5002]: I0930 12:37:57.962750 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fa77db27-d4ee-44a1-8e63-008f66b34b48-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "fa77db27-d4ee-44a1-8e63-008f66b34b48" (UID: "fa77db27-d4ee-44a1-8e63-008f66b34b48"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 30 12:37:57 crc kubenswrapper[5002]: I0930 12:37:57.963273 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fa77db27-d4ee-44a1-8e63-008f66b34b48-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "fa77db27-d4ee-44a1-8e63-008f66b34b48" (UID: "fa77db27-d4ee-44a1-8e63-008f66b34b48"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 30 12:37:57 crc kubenswrapper[5002]: I0930 12:37:57.985598 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fa77db27-d4ee-44a1-8e63-008f66b34b48-scripts" (OuterVolumeSpecName: "scripts") pod "fa77db27-d4ee-44a1-8e63-008f66b34b48" (UID: "fa77db27-d4ee-44a1-8e63-008f66b34b48"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 12:37:58 crc kubenswrapper[5002]: I0930 12:37:58.016299 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fa77db27-d4ee-44a1-8e63-008f66b34b48-kube-api-access-s8tj5" (OuterVolumeSpecName: "kube-api-access-s8tj5") pod "fa77db27-d4ee-44a1-8e63-008f66b34b48" (UID: "fa77db27-d4ee-44a1-8e63-008f66b34b48"). InnerVolumeSpecName "kube-api-access-s8tj5". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 12:37:58 crc kubenswrapper[5002]: I0930 12:37:58.040929 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fa77db27-d4ee-44a1-8e63-008f66b34b48-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "fa77db27-d4ee-44a1-8e63-008f66b34b48" (UID: "fa77db27-d4ee-44a1-8e63-008f66b34b48"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 12:37:58 crc kubenswrapper[5002]: I0930 12:37:58.065418 5002 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fa77db27-d4ee-44a1-8e63-008f66b34b48-run-httpd\") on node \"crc\" DevicePath \"\""
Sep 30 12:37:58 crc kubenswrapper[5002]: I0930 12:37:58.065449 5002 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fa77db27-d4ee-44a1-8e63-008f66b34b48-scripts\") on node \"crc\" DevicePath \"\""
Sep 30 12:37:58 crc kubenswrapper[5002]: I0930 12:37:58.065458 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s8tj5\" (UniqueName: \"kubernetes.io/projected/fa77db27-d4ee-44a1-8e63-008f66b34b48-kube-api-access-s8tj5\") on node \"crc\" DevicePath \"\""
Sep 30 12:37:58 crc kubenswrapper[5002]: I0930 12:37:58.065485 5002 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/fa77db27-d4ee-44a1-8e63-008f66b34b48-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\""
Sep 30 12:37:58 crc kubenswrapper[5002]: I0930 12:37:58.065494 5002 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fa77db27-d4ee-44a1-8e63-008f66b34b48-log-httpd\") on node \"crc\" DevicePath \"\""
Sep 30 12:37:58 crc kubenswrapper[5002]: I0930 12:37:58.147098 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-858c4cb9d6-g5ts6"]
Sep 30 12:37:58 crc kubenswrapper[5002]: I0930 12:37:58.163807 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fa77db27-d4ee-44a1-8e63-008f66b34b48-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "fa77db27-d4ee-44a1-8e63-008f66b34b48" (UID: "fa77db27-d4ee-44a1-8e63-008f66b34b48"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 12:37:58 crc kubenswrapper[5002]: I0930 12:37:58.167227 5002 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fa77db27-d4ee-44a1-8e63-008f66b34b48-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Sep 30 12:37:58 crc kubenswrapper[5002]: I0930 12:37:58.168168 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-5c946cd5ff-n7x9t" event={"ID":"1df12b89-f778-45b2-b39a-c95700262b6e","Type":"ContainerStarted","Data":"6b0469d0a7475c6b327745c3ddd22f595a673dce2cd6faedbccdd0f82aca4af5"}
Sep 30 12:37:58 crc kubenswrapper[5002]: I0930 12:37:58.168244 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-5c946cd5ff-n7x9t"
Sep 30 12:37:58 crc kubenswrapper[5002]: I0930 12:37:58.168259 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-5c946cd5ff-n7x9t" event={"ID":"1df12b89-f778-45b2-b39a-c95700262b6e","Type":"ContainerStarted","Data":"5170b3cc068c4928da00d4b0ce4712663e2df10ae55e8d09d39c267ab02fbac4"}
Sep 30 12:37:58 crc kubenswrapper[5002]: I0930 12:37:58.183106 5002 generic.go:334] "Generic (PLEG): container finished" podID="fa77db27-d4ee-44a1-8e63-008f66b34b48" containerID="c36949a6b43b6c3f905e7f9919a3b739c026f04aa774295ff284f8de300b226b" exitCode=0
Sep 30 12:37:58 crc kubenswrapper[5002]: I0930 12:37:58.183145 5002 generic.go:334] "Generic (PLEG): container finished" podID="fa77db27-d4ee-44a1-8e63-008f66b34b48" containerID="04755585fe16d612a1b3517199059a4a28944e1c3c9252e203697dc993ad8e7b" exitCode=0
Sep 30 12:37:58 crc kubenswrapper[5002]: I0930 12:37:58.183153 5002 generic.go:334] "Generic (PLEG): container finished" podID="fa77db27-d4ee-44a1-8e63-008f66b34b48" containerID="55b233246ee53d4d5a3b8329279bf1dbc9efff7875abe28b2346f065546eb3e4" exitCode=0
Sep 30 12:37:58 crc kubenswrapper[5002]: I0930 12:37:58.183866 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"fa77db27-d4ee-44a1-8e63-008f66b34b48","Type":"ContainerDied","Data":"c36949a6b43b6c3f905e7f9919a3b739c026f04aa774295ff284f8de300b226b"}
Sep 30 12:37:58 crc kubenswrapper[5002]: I0930 12:37:58.183918 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"fa77db27-d4ee-44a1-8e63-008f66b34b48","Type":"ContainerDied","Data":"04755585fe16d612a1b3517199059a4a28944e1c3c9252e203697dc993ad8e7b"}
Sep 30 12:37:58 crc kubenswrapper[5002]: I0930 12:37:58.183928 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"fa77db27-d4ee-44a1-8e63-008f66b34b48","Type":"ContainerDied","Data":"55b233246ee53d4d5a3b8329279bf1dbc9efff7875abe28b2346f065546eb3e4"}
Sep 30 12:37:58 crc kubenswrapper[5002]: I0930 12:37:58.183937 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"fa77db27-d4ee-44a1-8e63-008f66b34b48","Type":"ContainerDied","Data":"be5083d9672a96867d239e51a660b1020853d3dccf673128f2f84315ed36f414"}
Sep 30 12:37:58 crc kubenswrapper[5002]: I0930 12:37:58.183953 5002 scope.go:117] "RemoveContainer" containerID="c36949a6b43b6c3f905e7f9919a3b739c026f04aa774295ff284f8de300b226b"
Sep 30 12:37:58 crc kubenswrapper[5002]: I0930 12:37:58.184646 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Sep 30 12:37:58 crc kubenswrapper[5002]: I0930 12:37:58.191202 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-5c946cd5ff-n7x9t" podStartSLOduration=3.191182137 podStartE2EDuration="3.191182137s" podCreationTimestamp="2025-09-30 12:37:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 12:37:58.184811395 +0000 UTC m=+1052.434493541" watchObservedRunningTime="2025-09-30 12:37:58.191182137 +0000 UTC m=+1052.440864283"
Sep 30 12:37:58 crc kubenswrapper[5002]: I0930 12:37:58.201418 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fa77db27-d4ee-44a1-8e63-008f66b34b48-config-data" (OuterVolumeSpecName: "config-data") pod "fa77db27-d4ee-44a1-8e63-008f66b34b48" (UID: "fa77db27-d4ee-44a1-8e63-008f66b34b48"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 12:37:58 crc kubenswrapper[5002]: I0930 12:37:58.269597 5002 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fa77db27-d4ee-44a1-8e63-008f66b34b48-config-data\") on node \"crc\" DevicePath \"\""
Sep 30 12:37:58 crc kubenswrapper[5002]: I0930 12:37:58.296600 5002 scope.go:117] "RemoveContainer" containerID="f9379ff97f831462614e289dd46f9b8c895d82de5eac97e80ea92c221b895960"
Sep 30 12:37:58 crc kubenswrapper[5002]: I0930 12:37:58.359742 5002 scope.go:117] "RemoveContainer" containerID="04755585fe16d612a1b3517199059a4a28944e1c3c9252e203697dc993ad8e7b"
Sep 30 12:37:58 crc kubenswrapper[5002]: I0930 12:37:58.385138 5002 scope.go:117] "RemoveContainer" containerID="55b233246ee53d4d5a3b8329279bf1dbc9efff7875abe28b2346f065546eb3e4"
Sep 30 12:37:58 crc kubenswrapper[5002]: I0930 12:37:58.403887 5002 scope.go:117] "RemoveContainer" containerID="c36949a6b43b6c3f905e7f9919a3b739c026f04aa774295ff284f8de300b226b"
Sep 30 12:37:58 crc kubenswrapper[5002]: E0930 12:37:58.404997 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c36949a6b43b6c3f905e7f9919a3b739c026f04aa774295ff284f8de300b226b\": container with ID starting with c36949a6b43b6c3f905e7f9919a3b739c026f04aa774295ff284f8de300b226b not found: ID does not exist" containerID="c36949a6b43b6c3f905e7f9919a3b739c026f04aa774295ff284f8de300b226b"
Sep 30 12:37:58 crc kubenswrapper[5002]: I0930 12:37:58.405028 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c36949a6b43b6c3f905e7f9919a3b739c026f04aa774295ff284f8de300b226b"} err="failed to get container status \"c36949a6b43b6c3f905e7f9919a3b739c026f04aa774295ff284f8de300b226b\": rpc error: code = NotFound desc = could not find container \"c36949a6b43b6c3f905e7f9919a3b739c026f04aa774295ff284f8de300b226b\": container with ID starting with c36949a6b43b6c3f905e7f9919a3b739c026f04aa774295ff284f8de300b226b not found: ID does not exist"
Sep 30 12:37:58 crc kubenswrapper[5002]: I0930 12:37:58.405047 5002 scope.go:117] "RemoveContainer" containerID="f9379ff97f831462614e289dd46f9b8c895d82de5eac97e80ea92c221b895960"
Sep 30 12:37:58 crc kubenswrapper[5002]: E0930 12:37:58.405417 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f9379ff97f831462614e289dd46f9b8c895d82de5eac97e80ea92c221b895960\": container with ID starting with f9379ff97f831462614e289dd46f9b8c895d82de5eac97e80ea92c221b895960 not found: ID does not exist" containerID="f9379ff97f831462614e289dd46f9b8c895d82de5eac97e80ea92c221b895960"
Sep 30 12:37:58 crc kubenswrapper[5002]: I0930 12:37:58.405438 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f9379ff97f831462614e289dd46f9b8c895d82de5eac97e80ea92c221b895960"} err="failed to get container status \"f9379ff97f831462614e289dd46f9b8c895d82de5eac97e80ea92c221b895960\": rpc error: code = NotFound desc = could not find container \"f9379ff97f831462614e289dd46f9b8c895d82de5eac97e80ea92c221b895960\": container with ID starting with f9379ff97f831462614e289dd46f9b8c895d82de5eac97e80ea92c221b895960 not found: ID does not exist"
Sep 30 12:37:58 crc kubenswrapper[5002]: I0930 12:37:58.405451 5002 scope.go:117] "RemoveContainer" containerID="04755585fe16d612a1b3517199059a4a28944e1c3c9252e203697dc993ad8e7b"
Sep 30 12:37:58 crc kubenswrapper[5002]: E0930 12:37:58.405857 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"04755585fe16d612a1b3517199059a4a28944e1c3c9252e203697dc993ad8e7b\": container with ID starting with 04755585fe16d612a1b3517199059a4a28944e1c3c9252e203697dc993ad8e7b not found: ID does not exist" containerID="04755585fe16d612a1b3517199059a4a28944e1c3c9252e203697dc993ad8e7b"
Sep 30 12:37:58 crc kubenswrapper[5002]: I0930 12:37:58.405875 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"04755585fe16d612a1b3517199059a4a28944e1c3c9252e203697dc993ad8e7b"} err="failed to get container status \"04755585fe16d612a1b3517199059a4a28944e1c3c9252e203697dc993ad8e7b\": rpc error: code = NotFound desc = could not find container \"04755585fe16d612a1b3517199059a4a28944e1c3c9252e203697dc993ad8e7b\": container with ID starting with 04755585fe16d612a1b3517199059a4a28944e1c3c9252e203697dc993ad8e7b not found: ID does not exist"
Sep 30 12:37:58 crc kubenswrapper[5002]: I0930 12:37:58.405888 5002 scope.go:117] "RemoveContainer" containerID="55b233246ee53d4d5a3b8329279bf1dbc9efff7875abe28b2346f065546eb3e4"
Sep 30 12:37:58 crc kubenswrapper[5002]: E0930 12:37:58.406767 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"55b233246ee53d4d5a3b8329279bf1dbc9efff7875abe28b2346f065546eb3e4\": container with ID starting with 55b233246ee53d4d5a3b8329279bf1dbc9efff7875abe28b2346f065546eb3e4 not found: ID does not exist" containerID="55b233246ee53d4d5a3b8329279bf1dbc9efff7875abe28b2346f065546eb3e4"
Sep 30 12:37:58 crc kubenswrapper[5002]: I0930 12:37:58.406795 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"55b233246ee53d4d5a3b8329279bf1dbc9efff7875abe28b2346f065546eb3e4"} err="failed to get container status \"55b233246ee53d4d5a3b8329279bf1dbc9efff7875abe28b2346f065546eb3e4\": rpc error: code = NotFound desc = could not find container \"55b233246ee53d4d5a3b8329279bf1dbc9efff7875abe28b2346f065546eb3e4\": container with ID starting with 55b233246ee53d4d5a3b8329279bf1dbc9efff7875abe28b2346f065546eb3e4 not found: ID does not exist"
Sep 30 12:37:58 crc kubenswrapper[5002]: I0930 12:37:58.406809 5002 scope.go:117] "RemoveContainer" containerID="c36949a6b43b6c3f905e7f9919a3b739c026f04aa774295ff284f8de300b226b"
Sep 30 12:37:58 crc kubenswrapper[5002]: I0930 12:37:58.407022 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c36949a6b43b6c3f905e7f9919a3b739c026f04aa774295ff284f8de300b226b"} err="failed to get container status \"c36949a6b43b6c3f905e7f9919a3b739c026f04aa774295ff284f8de300b226b\": rpc error: code = NotFound desc = could not find container \"c36949a6b43b6c3f905e7f9919a3b739c026f04aa774295ff284f8de300b226b\": container with ID starting with c36949a6b43b6c3f905e7f9919a3b739c026f04aa774295ff284f8de300b226b not found: ID does not exist"
Sep 30 12:37:58 crc kubenswrapper[5002]: I0930 12:37:58.407040 5002 scope.go:117] "RemoveContainer" containerID="f9379ff97f831462614e289dd46f9b8c895d82de5eac97e80ea92c221b895960"
Sep 30 12:37:58 crc kubenswrapper[5002]: I0930 12:37:58.407235 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f9379ff97f831462614e289dd46f9b8c895d82de5eac97e80ea92c221b895960"} err="failed to get container status \"f9379ff97f831462614e289dd46f9b8c895d82de5eac97e80ea92c221b895960\": rpc error: code = NotFound desc = could not find container \"f9379ff97f831462614e289dd46f9b8c895d82de5eac97e80ea92c221b895960\": container with ID starting with f9379ff97f831462614e289dd46f9b8c895d82de5eac97e80ea92c221b895960 not found: ID does not exist"
Sep 30 12:37:58 crc kubenswrapper[5002]: I0930 12:37:58.407257 5002 scope.go:117] "RemoveContainer" containerID="04755585fe16d612a1b3517199059a4a28944e1c3c9252e203697dc993ad8e7b"
Sep 30 12:37:58 crc kubenswrapper[5002]: I0930 12:37:58.407421 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"04755585fe16d612a1b3517199059a4a28944e1c3c9252e203697dc993ad8e7b"} err="failed to get container status \"04755585fe16d612a1b3517199059a4a28944e1c3c9252e203697dc993ad8e7b\": rpc error: code = NotFound desc = could not find container \"04755585fe16d612a1b3517199059a4a28944e1c3c9252e203697dc993ad8e7b\": container with ID starting with 04755585fe16d612a1b3517199059a4a28944e1c3c9252e203697dc993ad8e7b not found: ID does not exist"
Sep 30 12:37:58 crc kubenswrapper[5002]: I0930 12:37:58.407441 5002 scope.go:117] "RemoveContainer" containerID="55b233246ee53d4d5a3b8329279bf1dbc9efff7875abe28b2346f065546eb3e4"
Sep 30 12:37:58 crc kubenswrapper[5002]: I0930 12:37:58.407764 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"55b233246ee53d4d5a3b8329279bf1dbc9efff7875abe28b2346f065546eb3e4"} err="failed to get container status \"55b233246ee53d4d5a3b8329279bf1dbc9efff7875abe28b2346f065546eb3e4\": rpc error: code = NotFound desc = could not find container \"55b233246ee53d4d5a3b8329279bf1dbc9efff7875abe28b2346f065546eb3e4\": container with ID starting with 55b233246ee53d4d5a3b8329279bf1dbc9efff7875abe28b2346f065546eb3e4 not found: ID does not exist"
Sep 30 12:37:58 crc kubenswrapper[5002]: I0930 12:37:58.407782 5002 scope.go:117] "RemoveContainer" containerID="c36949a6b43b6c3f905e7f9919a3b739c026f04aa774295ff284f8de300b226b"
Sep 30 12:37:58 crc kubenswrapper[5002]: I0930 12:37:58.408030 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c36949a6b43b6c3f905e7f9919a3b739c026f04aa774295ff284f8de300b226b"} err="failed to get container status \"c36949a6b43b6c3f905e7f9919a3b739c026f04aa774295ff284f8de300b226b\": rpc error: code = NotFound desc = could not find container \"c36949a6b43b6c3f905e7f9919a3b739c026f04aa774295ff284f8de300b226b\": container with ID starting with c36949a6b43b6c3f905e7f9919a3b739c026f04aa774295ff284f8de300b226b not found: ID does not exist"
Sep 30 12:37:58 crc kubenswrapper[5002]: I0930 12:37:58.408054 5002 scope.go:117] "RemoveContainer" containerID="f9379ff97f831462614e289dd46f9b8c895d82de5eac97e80ea92c221b895960"
Sep 30 12:37:58 crc kubenswrapper[5002]: I0930 12:37:58.408247 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f9379ff97f831462614e289dd46f9b8c895d82de5eac97e80ea92c221b895960"} err="failed to get container status \"f9379ff97f831462614e289dd46f9b8c895d82de5eac97e80ea92c221b895960\": rpc error: code = NotFound desc = could not find container \"f9379ff97f831462614e289dd46f9b8c895d82de5eac97e80ea92c221b895960\": container with ID starting with f9379ff97f831462614e289dd46f9b8c895d82de5eac97e80ea92c221b895960 not found: ID does not exist"
Sep 30 12:37:58 crc kubenswrapper[5002]: I0930 12:37:58.408264 5002 scope.go:117] "RemoveContainer" containerID="04755585fe16d612a1b3517199059a4a28944e1c3c9252e203697dc993ad8e7b"
Sep 30 12:37:58 crc kubenswrapper[5002]: I0930 12:37:58.408408 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"04755585fe16d612a1b3517199059a4a28944e1c3c9252e203697dc993ad8e7b"} err="failed to get container status \"04755585fe16d612a1b3517199059a4a28944e1c3c9252e203697dc993ad8e7b\": rpc error: code = NotFound desc = could not find container \"04755585fe16d612a1b3517199059a4a28944e1c3c9252e203697dc993ad8e7b\": container with ID starting with 04755585fe16d612a1b3517199059a4a28944e1c3c9252e203697dc993ad8e7b not found: ID does not exist"
Sep 30 12:37:58 crc kubenswrapper[5002]: I0930 12:37:58.408426 5002 scope.go:117] "RemoveContainer" containerID="55b233246ee53d4d5a3b8329279bf1dbc9efff7875abe28b2346f065546eb3e4"
Sep 30 12:37:58 crc kubenswrapper[5002]: I0930 12:37:58.408978 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"55b233246ee53d4d5a3b8329279bf1dbc9efff7875abe28b2346f065546eb3e4"} err="failed to get container status \"55b233246ee53d4d5a3b8329279bf1dbc9efff7875abe28b2346f065546eb3e4\": rpc error: code = NotFound desc = could not find container \"55b233246ee53d4d5a3b8329279bf1dbc9efff7875abe28b2346f065546eb3e4\": container with ID starting with 55b233246ee53d4d5a3b8329279bf1dbc9efff7875abe28b2346f065546eb3e4 not found: ID does not exist"
Sep 30 12:37:58 crc kubenswrapper[5002]: I0930 12:37:58.536453 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"]
Sep 30 12:37:58 crc kubenswrapper[5002]: I0930 12:37:58.550392 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"]
Sep 30 12:37:58 crc kubenswrapper[5002]: I0930 12:37:58.559177 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"]
Sep 30 12:37:58 crc kubenswrapper[5002]: E0930 12:37:58.559752 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fa77db27-d4ee-44a1-8e63-008f66b34b48" containerName="sg-core"
Sep 30 12:37:58 crc kubenswrapper[5002]: I0930 12:37:58.559773 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="fa77db27-d4ee-44a1-8e63-008f66b34b48" containerName="sg-core"
Sep 30 12:37:58 crc kubenswrapper[5002]: E0930 12:37:58.559794 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fa77db27-d4ee-44a1-8e63-008f66b34b48" containerName="ceilometer-central-agent"
Sep 30 12:37:58 crc kubenswrapper[5002]: I0930 12:37:58.559804 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="fa77db27-d4ee-44a1-8e63-008f66b34b48" containerName="ceilometer-central-agent"
Sep 30 12:37:58 crc kubenswrapper[5002]: E0930 12:37:58.559816 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fa77db27-d4ee-44a1-8e63-008f66b34b48" containerName="proxy-httpd"
Sep 30 12:37:58 crc kubenswrapper[5002]: I0930 12:37:58.559825 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="fa77db27-d4ee-44a1-8e63-008f66b34b48" containerName="proxy-httpd"
Sep 30 12:37:58 crc kubenswrapper[5002]: E0930 12:37:58.559843 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fa77db27-d4ee-44a1-8e63-008f66b34b48" containerName="ceilometer-notification-agent"
Sep 30 12:37:58 crc kubenswrapper[5002]: I0930 12:37:58.559853 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="fa77db27-d4ee-44a1-8e63-008f66b34b48" containerName="ceilometer-notification-agent"
Sep 30 12:37:58 crc kubenswrapper[5002]: I0930 12:37:58.560089 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="fa77db27-d4ee-44a1-8e63-008f66b34b48" containerName="proxy-httpd"
Sep 30 12:37:58 crc kubenswrapper[5002]: I0930 12:37:58.560122 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="fa77db27-d4ee-44a1-8e63-008f66b34b48" containerName="ceilometer-central-agent"
Sep 30 12:37:58 crc kubenswrapper[5002]: I0930 12:37:58.560152 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="fa77db27-d4ee-44a1-8e63-008f66b34b48" containerName="sg-core"
Sep 30 12:37:58 crc kubenswrapper[5002]: I0930 12:37:58.560165 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="fa77db27-d4ee-44a1-8e63-008f66b34b48" containerName="ceilometer-notification-agent"
Sep 30 12:37:58 crc kubenswrapper[5002]: I0930 12:37:58.562676 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Sep 30 12:37:58 crc kubenswrapper[5002]: I0930 12:37:58.567864 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data"
Sep 30 12:37:58 crc kubenswrapper[5002]: I0930 12:37:58.570049 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts"
Sep 30 12:37:58 crc kubenswrapper[5002]: I0930 12:37:58.572245 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"]
Sep 30 12:37:58 crc kubenswrapper[5002]: I0930 12:37:58.681607 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z45jl\" (UniqueName: \"kubernetes.io/projected/104a5fb7-2a8a-4274-9c0c-84539d4b5c25-kube-api-access-z45jl\") pod \"ceilometer-0\" (UID: \"104a5fb7-2a8a-4274-9c0c-84539d4b5c25\") " pod="openstack/ceilometer-0"
Sep 30 12:37:58 crc kubenswrapper[5002]: I0930 12:37:58.681706 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/104a5fb7-2a8a-4274-9c0c-84539d4b5c25-run-httpd\") pod \"ceilometer-0\" (UID: \"104a5fb7-2a8a-4274-9c0c-84539d4b5c25\") " pod="openstack/ceilometer-0"
Sep 30 12:37:58 crc kubenswrapper[5002]: I0930 12:37:58.681757 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/104a5fb7-2a8a-4274-9c0c-84539d4b5c25-config-data\") pod \"ceilometer-0\" (UID: \"104a5fb7-2a8a-4274-9c0c-84539d4b5c25\") " pod="openstack/ceilometer-0"
Sep 30 12:37:58 crc kubenswrapper[5002]: I0930 12:37:58.681811 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/104a5fb7-2a8a-4274-9c0c-84539d4b5c25-log-httpd\") pod \"ceilometer-0\" (UID: \"104a5fb7-2a8a-4274-9c0c-84539d4b5c25\") " pod="openstack/ceilometer-0"
Sep 30 12:37:58 crc kubenswrapper[5002]: I0930 12:37:58.681835 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/104a5fb7-2a8a-4274-9c0c-84539d4b5c25-scripts\") pod \"ceilometer-0\" (UID: \"104a5fb7-2a8a-4274-9c0c-84539d4b5c25\") " pod="openstack/ceilometer-0"
Sep 30 12:37:58 crc kubenswrapper[5002]: I0930 12:37:58.681858 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/104a5fb7-2a8a-4274-9c0c-84539d4b5c25-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"104a5fb7-2a8a-4274-9c0c-84539d4b5c25\") " pod="openstack/ceilometer-0"
Sep 30 12:37:58 crc kubenswrapper[5002]: I0930 12:37:58.681897 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/104a5fb7-2a8a-4274-9c0c-84539d4b5c25-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"104a5fb7-2a8a-4274-9c0c-84539d4b5c25\") " pod="openstack/ceilometer-0"
Sep 30 12:37:58 crc kubenswrapper[5002]: I0930 12:37:58.694020 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fa77db27-d4ee-44a1-8e63-008f66b34b48" path="/var/lib/kubelet/pods/fa77db27-d4ee-44a1-8e63-008f66b34b48/volumes"
Sep 30 12:37:58 crc kubenswrapper[5002]: I0930 12:37:58.782912 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/104a5fb7-2a8a-4274-9c0c-84539d4b5c25-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"104a5fb7-2a8a-4274-9c0c-84539d4b5c25\") " pod="openstack/ceilometer-0"
Sep 30 12:37:58 crc kubenswrapper[5002]: I0930 12:37:58.782977 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z45jl\" (UniqueName: \"kubernetes.io/projected/104a5fb7-2a8a-4274-9c0c-84539d4b5c25-kube-api-access-z45jl\") pod \"ceilometer-0\" (UID: \"104a5fb7-2a8a-4274-9c0c-84539d4b5c25\") " pod="openstack/ceilometer-0"
Sep 30 12:37:58 crc kubenswrapper[5002]: I0930 12:37:58.783027 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/104a5fb7-2a8a-4274-9c0c-84539d4b5c25-run-httpd\") pod \"ceilometer-0\" (UID: \"104a5fb7-2a8a-4274-9c0c-84539d4b5c25\") " pod="openstack/ceilometer-0"
Sep 30 12:37:58 crc kubenswrapper[5002]: I0930 12:37:58.783056 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/104a5fb7-2a8a-4274-9c0c-84539d4b5c25-config-data\") pod \"ceilometer-0\" (UID: \"104a5fb7-2a8a-4274-9c0c-84539d4b5c25\") " pod="openstack/ceilometer-0"
Sep 30 12:37:58 crc kubenswrapper[5002]: I0930 12:37:58.783114 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/104a5fb7-2a8a-4274-9c0c-84539d4b5c25-log-httpd\") pod \"ceilometer-0\" (UID: \"104a5fb7-2a8a-4274-9c0c-84539d4b5c25\") " pod="openstack/ceilometer-0"
Sep 30 12:37:58 crc kubenswrapper[5002]: I0930 12:37:58.783138 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/104a5fb7-2a8a-4274-9c0c-84539d4b5c25-scripts\") pod \"ceilometer-0\" (UID: \"104a5fb7-2a8a-4274-9c0c-84539d4b5c25\") " pod="openstack/ceilometer-0"
Sep 30 12:37:58 crc kubenswrapper[5002]: I0930 12:37:58.783177 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/104a5fb7-2a8a-4274-9c0c-84539d4b5c25-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"104a5fb7-2a8a-4274-9c0c-84539d4b5c25\") " pod="openstack/ceilometer-0"
Sep 30 12:37:58 crc kubenswrapper[5002]: I0930 12:37:58.787033 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/104a5fb7-2a8a-4274-9c0c-84539d4b5c25-log-httpd\") pod \"ceilometer-0\" (UID: \"104a5fb7-2a8a-4274-9c0c-84539d4b5c25\") " pod="openstack/ceilometer-0"
Sep 30 12:37:58 crc kubenswrapper[5002]: I0930 12:37:58.787200 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/104a5fb7-2a8a-4274-9c0c-84539d4b5c25-run-httpd\") pod \"ceilometer-0\" (UID: \"104a5fb7-2a8a-4274-9c0c-84539d4b5c25\") " pod="openstack/ceilometer-0"
Sep 30 12:37:58 crc kubenswrapper[5002]: I0930 12:37:58.789912 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/104a5fb7-2a8a-4274-9c0c-84539d4b5c25-config-data\") pod \"ceilometer-0\" (UID: \"104a5fb7-2a8a-4274-9c0c-84539d4b5c25\") " pod="openstack/ceilometer-0"
Sep 30 12:37:58 crc kubenswrapper[5002]: I0930 12:37:58.790236 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/104a5fb7-2a8a-4274-9c0c-84539d4b5c25-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"104a5fb7-2a8a-4274-9c0c-84539d4b5c25\") " pod="openstack/ceilometer-0"
Sep 30 12:37:58 crc kubenswrapper[5002]: I0930 12:37:58.792093 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/104a5fb7-2a8a-4274-9c0c-84539d4b5c25-scripts\") pod \"ceilometer-0\" (UID: \"104a5fb7-2a8a-4274-9c0c-84539d4b5c25\") " pod="openstack/ceilometer-0"
Sep 30 12:37:58 crc kubenswrapper[5002]: I0930 12:37:58.792315 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/104a5fb7-2a8a-4274-9c0c-84539d4b5c25-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"104a5fb7-2a8a-4274-9c0c-84539d4b5c25\") " pod="openstack/ceilometer-0"
Sep 30 12:37:58 crc kubenswrapper[5002]: I0930 12:37:58.807571 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z45jl\" (UniqueName: \"kubernetes.io/projected/104a5fb7-2a8a-4274-9c0c-84539d4b5c25-kube-api-access-z45jl\") pod \"ceilometer-0\" (UID: \"104a5fb7-2a8a-4274-9c0c-84539d4b5c25\") " pod="openstack/ceilometer-0"
Sep 30 12:37:58 crc kubenswrapper[5002]: I0930 12:37:58.957987 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Sep 30 12:37:59 crc kubenswrapper[5002]: I0930 12:37:59.195378 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-858c4cb9d6-g5ts6" event={"ID":"ed88e034-9e24-4611-9d19-90530ff3f7b1","Type":"ContainerStarted","Data":"9e12f29dec7efd797d38b55ae7d581bce955e1d6aa6f98b351ebca944a8d37e1"}
Sep 30 12:37:59 crc kubenswrapper[5002]: I0930 12:37:59.195694 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-858c4cb9d6-g5ts6" event={"ID":"ed88e034-9e24-4611-9d19-90530ff3f7b1","Type":"ContainerStarted","Data":"b89e03ddfe3ff2a73540f7d887e58bc0deee6bbcacf1298ecf750df5e5526741"}
Sep 30 12:37:59 crc kubenswrapper[5002]: I0930 12:37:59.195709 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-858c4cb9d6-g5ts6" event={"ID":"ed88e034-9e24-4611-9d19-90530ff3f7b1","Type":"ContainerStarted","Data":"8808a0da01dfa14a0f519676a4a05f9733ff97965ea0e9bf3f04e7f8b30f4a54"}
Sep 30 12:37:59 crc kubenswrapper[5002]: I0930 12:37:59.419743 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"]
Sep 30 12:37:59 crc kubenswrapper[5002]: W0930 12:37:59.456227 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod104a5fb7_2a8a_4274_9c0c_84539d4b5c25.slice/crio-07e08889cc4dd4f33688980cb7fbbc8f3d29925062744af37083ba9c0cbe7e7c WatchSource:0}: Error finding container 07e08889cc4dd4f33688980cb7fbbc8f3d29925062744af37083ba9c0cbe7e7c: Status 404 returned error can't find the container with id 07e08889cc4dd4f33688980cb7fbbc8f3d29925062744af37083ba9c0cbe7e7c
Sep 30 12:38:00 crc kubenswrapper[5002]: I0930 12:38:00.204638 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"104a5fb7-2a8a-4274-9c0c-84539d4b5c25","Type":"ContainerStarted","Data":"07e08889cc4dd4f33688980cb7fbbc8f3d29925062744af37083ba9c0cbe7e7c"}
Sep 30 12:38:00 crc kubenswrapper[5002]: I0930 12:38:00.204751 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-858c4cb9d6-g5ts6"
Sep 30 12:38:00 crc kubenswrapper[5002]: I0930 12:38:00.204794 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-858c4cb9d6-g5ts6"
Sep 30 12:38:00 crc kubenswrapper[5002]: I0930 12:38:00.236289 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-858c4cb9d6-g5ts6" podStartSLOduration=3.236267227 podStartE2EDuration="3.236267227s" podCreationTimestamp="2025-09-30 12:37:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 12:38:00.222338303 +0000 UTC m=+1054.472020469" watchObservedRunningTime="2025-09-30 12:38:00.236267227 +0000 UTC m=+1054.485949373"
Sep 30 12:38:02 crc kubenswrapper[5002]: I0930 12:38:02.098436 5002 patch_prober.go:28] interesting pod/machine-config-daemon-ncbb5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Sep 30 12:38:02 crc kubenswrapper[5002]: I0930 12:38:02.098781 5002 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" podUID="341a55c6-78d3-4fa2-8f47-b56fd41fa1c1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Sep 30 12:38:02 crc kubenswrapper[5002]: I0930 12:38:02.473981 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-75c8ddd69c-zfwrs"
Sep 30 12:38:02 crc kubenswrapper[5002]: I0930 12:38:02.547702 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-8b5c85b87-frwpd"]
Sep 30 12:38:02 crc kubenswrapper[5002]: I0930 12:38:02.547963 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-8b5c85b87-frwpd" podUID="e6c79eff-4c2e-42b9-af82-df66c7091400" containerName="dnsmasq-dns" containerID="cri-o://6a0976a3981404b2496dbd01df6e6cf6820c9aa403d6ea3e0c4f6fd651927352" gracePeriod=10
Sep 30 12:38:03 crc kubenswrapper[5002]: I0930 12:38:03.155547 5002 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-8b5c85b87-frwpd" podUID="e6c79eff-4c2e-42b9-af82-df66c7091400" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.139:5353: connect: connection refused"
Sep 30 12:38:03 crc kubenswrapper[5002]: I0930 12:38:03.880997 5002 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-f8777b74-fpwh2" podUID="fd4d1d88-c894-496a-b2ee-00bf80fa2415" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.144:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.144:8443: connect: connection refused"
Sep 30 12:38:03 crc kubenswrapper[5002]: I0930 12:38:03.881415 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-f8777b74-fpwh2"
Sep 30 12:38:04 crc kubenswrapper[5002]: I0930 12:38:04.446771 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/swift-proxy-85654c5dc5-xmznd"
Sep 30 12:38:04 crc kubenswrapper[5002]: I0930 12:38:04.449114 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/swift-proxy-85654c5dc5-xmznd"
Sep 30 12:38:04 crc kubenswrapper[5002]: I0930 12:38:04.944788 5002 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack/barbican-api-86cd8676db-mltgz" podUID="beec5517-fd48-4017-9cb3-1102001e9439" containerName="barbican-api" probeResult="failure" output="Get \"http://10.217.0.159:9311/healthcheck\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)"
Sep 30 12:38:05 crc kubenswrapper[5002]: I0930 12:38:05.264013 5002 generic.go:334] "Generic (PLEG): container finished" podID="e6c79eff-4c2e-42b9-af82-df66c7091400" containerID="6a0976a3981404b2496dbd01df6e6cf6820c9aa403d6ea3e0c4f6fd651927352" exitCode=0
Sep 30 12:38:05 crc kubenswrapper[5002]: I0930 12:38:05.264359 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8b5c85b87-frwpd" event={"ID":"e6c79eff-4c2e-42b9-af82-df66c7091400","Type":"ContainerDied","Data":"6a0976a3981404b2496dbd01df6e6cf6820c9aa403d6ea3e0c4f6fd651927352"}
Sep 30 12:38:05 crc kubenswrapper[5002]: I0930 12:38:05.893840 5002 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack/barbican-api-86cd8676db-mltgz" podUID="beec5517-fd48-4017-9cb3-1102001e9439" containerName="barbican-api-log" probeResult="failure" output="Get \"http://10.217.0.159:9311/healthcheck\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)"
Sep 30 12:38:05 crc kubenswrapper[5002]: I0930 12:38:05.986741 5002 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-86cd8676db-mltgz" podUID="beec5517-fd48-4017-9cb3-1102001e9439" containerName="barbican-api" probeResult="failure" output="Get \"http://10.217.0.159:9311/healthcheck\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)"
Sep 30 12:38:05 crc kubenswrapper[5002]: I0930 12:38:05.986756 5002 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-86cd8676db-mltgz" podUID="beec5517-fd48-4017-9cb3-1102001e9439" containerName="barbican-api-log" probeResult="failure" output="Get \"http://10.217.0.159:9311/healthcheck\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)"
Sep 30 12:38:05 crc kubenswrapper[5002]: I0930 12:38:05.993241 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-86cd8676db-mltgz"
Sep 30 12:38:07 crc kubenswrapper[5002]: I0930 12:38:07.108455 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-8b5c85b87-frwpd"
Sep 30 12:38:07 crc kubenswrapper[5002]: I0930 12:38:07.277117 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e6c79eff-4c2e-42b9-af82-df66c7091400-config\") pod \"e6c79eff-4c2e-42b9-af82-df66c7091400\" (UID: \"e6c79eff-4c2e-42b9-af82-df66c7091400\") "
Sep 30 12:38:07 crc kubenswrapper[5002]: I0930 12:38:07.277537 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zxgtw\" (UniqueName: \"kubernetes.io/projected/e6c79eff-4c2e-42b9-af82-df66c7091400-kube-api-access-zxgtw\") pod \"e6c79eff-4c2e-42b9-af82-df66c7091400\" (UID: \"e6c79eff-4c2e-42b9-af82-df66c7091400\") "
Sep 30 12:38:07 crc kubenswrapper[5002]: I0930 12:38:07.277651 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e6c79eff-4c2e-42b9-af82-df66c7091400-dns-svc\") pod \"e6c79eff-4c2e-42b9-af82-df66c7091400\" (UID: \"e6c79eff-4c2e-42b9-af82-df66c7091400\") "
Sep 30 12:38:07 crc kubenswrapper[5002]: I0930 12:38:07.277705 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/e6c79eff-4c2e-42b9-af82-df66c7091400-dns-swift-storage-0\") pod \"e6c79eff-4c2e-42b9-af82-df66c7091400\" (UID: \"e6c79eff-4c2e-42b9-af82-df66c7091400\") "
Sep 30 12:38:07 crc kubenswrapper[5002]: I0930 12:38:07.277783 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e6c79eff-4c2e-42b9-af82-df66c7091400-ovsdbserver-nb\") pod \"e6c79eff-4c2e-42b9-af82-df66c7091400\" (UID: \"e6c79eff-4c2e-42b9-af82-df66c7091400\") "
Sep 30 12:38:07 crc kubenswrapper[5002]: I0930 12:38:07.277857 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e6c79eff-4c2e-42b9-af82-df66c7091400-ovsdbserver-sb\") pod \"e6c79eff-4c2e-42b9-af82-df66c7091400\" (UID: \"e6c79eff-4c2e-42b9-af82-df66c7091400\") "
Sep 30 12:38:07 crc kubenswrapper[5002]: I0930 12:38:07.294733 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e6c79eff-4c2e-42b9-af82-df66c7091400-kube-api-access-zxgtw" (OuterVolumeSpecName: "kube-api-access-zxgtw") pod "e6c79eff-4c2e-42b9-af82-df66c7091400" (UID: "e6c79eff-4c2e-42b9-af82-df66c7091400"). InnerVolumeSpecName "kube-api-access-zxgtw". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 12:38:07 crc kubenswrapper[5002]: I0930 12:38:07.317861 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8b5c85b87-frwpd" event={"ID":"e6c79eff-4c2e-42b9-af82-df66c7091400","Type":"ContainerDied","Data":"c6a2448193fde244abf77b8eea72555c50e2fd81229b7ac6c202bfc718909763"}
Sep 30 12:38:07 crc kubenswrapper[5002]: I0930 12:38:07.318131 5002 scope.go:117] "RemoveContainer" containerID="6a0976a3981404b2496dbd01df6e6cf6820c9aa403d6ea3e0c4f6fd651927352"
Sep 30 12:38:07 crc kubenswrapper[5002]: I0930 12:38:07.318292 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-8b5c85b87-frwpd"
Sep 30 12:38:07 crc kubenswrapper[5002]: I0930 12:38:07.334279 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"104a5fb7-2a8a-4274-9c0c-84539d4b5c25","Type":"ContainerStarted","Data":"45b5be152eee9ce326702fbce8687a56b0cf91fc38c9bfa4bf289fa91859cdba"}
Sep 30 12:38:07 crc kubenswrapper[5002]: I0930 12:38:07.334684 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e6c79eff-4c2e-42b9-af82-df66c7091400-config" (OuterVolumeSpecName: "config") pod "e6c79eff-4c2e-42b9-af82-df66c7091400" (UID: "e6c79eff-4c2e-42b9-af82-df66c7091400"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 30 12:38:07 crc kubenswrapper[5002]: I0930 12:38:07.346106 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"369a872a-8dd2-409e-9938-2a01cd707dc8","Type":"ContainerStarted","Data":"440db3f6f5ab10a854dd1ad9ce661c8012f73064ceef4ee5e38a9d3fe7b627da"}
Sep 30 12:38:07 crc kubenswrapper[5002]: I0930 12:38:07.348038 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-bnf58" event={"ID":"4fdf0c77-68ae-41ff-b6b5-122baa461b8c","Type":"ContainerStarted","Data":"12cdb900bf0fb859526f074db970c8589cc60cb37f68824e568f580a6fdf5c63"}
Sep 30 12:38:07 crc kubenswrapper[5002]: I0930 12:38:07.359707 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e6c79eff-4c2e-42b9-af82-df66c7091400-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "e6c79eff-4c2e-42b9-af82-df66c7091400" (UID: "e6c79eff-4c2e-42b9-af82-df66c7091400"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 30 12:38:07 crc kubenswrapper[5002]: I0930 12:38:07.370700 5002 scope.go:117] "RemoveContainer" containerID="d899ae74c228c6148c7d04b499e7e778900039ca21449b5f7f70729d363eed91"
Sep 30 12:38:07 crc kubenswrapper[5002]: I0930 12:38:07.376035 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstackclient" podStartSLOduration=1.926197742 podStartE2EDuration="17.376019204s" podCreationTimestamp="2025-09-30 12:37:50 +0000 UTC" firstStartedPulling="2025-09-30 12:37:51.392323863 +0000 UTC m=+1045.642006009" lastFinishedPulling="2025-09-30 12:38:06.842145325 +0000 UTC m=+1061.091827471" observedRunningTime="2025-09-30 12:38:07.370582195 +0000 UTC m=+1061.620264361" watchObservedRunningTime="2025-09-30 12:38:07.376019204 +0000 UTC m=+1061.625701350"
Sep 30 12:38:07 crc kubenswrapper[5002]: I0930 12:38:07.379954 5002 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e6c79eff-4c2e-42b9-af82-df66c7091400-config\") on node \"crc\" DevicePath \"\""
Sep 30 12:38:07 crc kubenswrapper[5002]: I0930 12:38:07.379990 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zxgtw\" (UniqueName: \"kubernetes.io/projected/e6c79eff-4c2e-42b9-af82-df66c7091400-kube-api-access-zxgtw\") on node \"crc\" DevicePath \"\""
Sep 30 12:38:07 crc kubenswrapper[5002]: I0930 12:38:07.379999 5002 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e6c79eff-4c2e-42b9-af82-df66c7091400-ovsdbserver-sb\") on node \"crc\" DevicePath \"\""
Sep 30 12:38:07 crc kubenswrapper[5002]: I0930 12:38:07.381728 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e6c79eff-4c2e-42b9-af82-df66c7091400-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "e6c79eff-4c2e-42b9-af82-df66c7091400" (UID: "e6c79eff-4c2e-42b9-af82-df66c7091400"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 30 12:38:07 crc kubenswrapper[5002]: I0930 12:38:07.403440 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-db-sync-bnf58" podStartSLOduration=2.430169793 podStartE2EDuration="1m25.403419551s" podCreationTimestamp="2025-09-30 12:36:42 +0000 UTC" firstStartedPulling="2025-09-30 12:36:43.868948758 +0000 UTC m=+978.118630904" lastFinishedPulling="2025-09-30 12:38:06.842198516 +0000 UTC m=+1061.091880662" observedRunningTime="2025-09-30 12:38:07.394252968 +0000 UTC m=+1061.643935114" watchObservedRunningTime="2025-09-30 12:38:07.403419551 +0000 UTC m=+1061.653101697"
Sep 30 12:38:07 crc kubenswrapper[5002]: I0930 12:38:07.405995 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e6c79eff-4c2e-42b9-af82-df66c7091400-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "e6c79eff-4c2e-42b9-af82-df66c7091400" (UID: "e6c79eff-4c2e-42b9-af82-df66c7091400"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 30 12:38:07 crc kubenswrapper[5002]: I0930 12:38:07.406061 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e6c79eff-4c2e-42b9-af82-df66c7091400-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "e6c79eff-4c2e-42b9-af82-df66c7091400" (UID: "e6c79eff-4c2e-42b9-af82-df66c7091400"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 30 12:38:07 crc kubenswrapper[5002]: I0930 12:38:07.481733 5002 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e6c79eff-4c2e-42b9-af82-df66c7091400-ovsdbserver-nb\") on node \"crc\" DevicePath \"\""
Sep 30 12:38:07 crc kubenswrapper[5002]: I0930 12:38:07.481768 5002 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e6c79eff-4c2e-42b9-af82-df66c7091400-dns-svc\") on node \"crc\" DevicePath \"\""
Sep 30 12:38:07 crc kubenswrapper[5002]: I0930 12:38:07.481778 5002 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/e6c79eff-4c2e-42b9-af82-df66c7091400-dns-swift-storage-0\") on node \"crc\" DevicePath \"\""
Sep 30 12:38:07 crc kubenswrapper[5002]: I0930 12:38:07.645492 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-8b5c85b87-frwpd"]
Sep 30 12:38:07 crc kubenswrapper[5002]: I0930 12:38:07.654208 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-8b5c85b87-frwpd"]
Sep 30 12:38:07 crc kubenswrapper[5002]: I0930 12:38:07.894582 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-86cd8676db-mltgz"
Sep 30 12:38:08 crc kubenswrapper[5002]: I0930 12:38:08.688850 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e6c79eff-4c2e-42b9-af82-df66c7091400" path="/var/lib/kubelet/pods/e6c79eff-4c2e-42b9-af82-df66c7091400/volumes"
Sep 30 12:38:09 crc kubenswrapper[5002]: I0930 12:38:09.133711 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"]
Sep 30 12:38:09 crc kubenswrapper[5002]: I0930 12:38:09.299456 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-858c4cb9d6-g5ts6"
Sep 30 12:38:09 crc kubenswrapper[5002]: I0930 12:38:09.392559 5002 generic.go:334] "Generic (PLEG): container finished" podID="fd4d1d88-c894-496a-b2ee-00bf80fa2415" containerID="20378dc396b5e4c91b822c9380bf06eb3c519550f74b233a99bb346dd6c21296" exitCode=137
Sep 30 12:38:09 crc kubenswrapper[5002]: I0930 12:38:09.392618 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-f8777b74-fpwh2" event={"ID":"fd4d1d88-c894-496a-b2ee-00bf80fa2415","Type":"ContainerDied","Data":"20378dc396b5e4c91b822c9380bf06eb3c519550f74b233a99bb346dd6c21296"}
Sep 30 12:38:09 crc kubenswrapper[5002]: I0930 12:38:09.394114 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"104a5fb7-2a8a-4274-9c0c-84539d4b5c25","Type":"ContainerStarted","Data":"bb71f6e3d773533a93b0a891ae606006d2c610a4f16dc3bca663666f79d39d21"}
Sep 30 12:38:09 crc kubenswrapper[5002]: I0930 12:38:09.476146 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-858c4cb9d6-g5ts6"
Sep 30 12:38:09 crc kubenswrapper[5002]: I0930 12:38:09.533949 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-86cd8676db-mltgz"]
Sep 30 12:38:09 crc kubenswrapper[5002]: I0930 12:38:09.534228 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-86cd8676db-mltgz" podUID="beec5517-fd48-4017-9cb3-1102001e9439" containerName="barbican-api-log" containerID="cri-o://3ee31987cac31de7a53839e25d36aa1a3805641e725f6df37d71980d6c990de9" gracePeriod=30
Sep 30 12:38:09 crc kubenswrapper[5002]: I0930 12:38:09.534409 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-86cd8676db-mltgz" podUID="beec5517-fd48-4017-9cb3-1102001e9439" containerName="barbican-api" containerID="cri-o://2ce09c6165125531983dc65ca7e3bf84ffa6db66b1a0a6cdbe41ee301e6b6e0a" gracePeriod=30
Sep 30 12:38:09 crc kubenswrapper[5002]: I0930 12:38:09.926236 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-f8777b74-fpwh2"
Sep 30 12:38:10 crc kubenswrapper[5002]: I0930 12:38:10.032687 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fd4d1d88-c894-496a-b2ee-00bf80fa2415-logs\") pod \"fd4d1d88-c894-496a-b2ee-00bf80fa2415\" (UID: \"fd4d1d88-c894-496a-b2ee-00bf80fa2415\") "
Sep 30 12:38:10 crc kubenswrapper[5002]: I0930 12:38:10.032758 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/fd4d1d88-c894-496a-b2ee-00bf80fa2415-horizon-tls-certs\") pod \"fd4d1d88-c894-496a-b2ee-00bf80fa2415\" (UID: \"fd4d1d88-c894-496a-b2ee-00bf80fa2415\") "
Sep 30 12:38:10 crc kubenswrapper[5002]: I0930 12:38:10.032784 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/fd4d1d88-c894-496a-b2ee-00bf80fa2415-horizon-secret-key\") pod \"fd4d1d88-c894-496a-b2ee-00bf80fa2415\" (UID: \"fd4d1d88-c894-496a-b2ee-00bf80fa2415\") "
Sep 30 12:38:10 crc kubenswrapper[5002]: I0930 12:38:10.032820 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/fd4d1d88-c894-496a-b2ee-00bf80fa2415-scripts\") pod \"fd4d1d88-c894-496a-b2ee-00bf80fa2415\" (UID: \"fd4d1d88-c894-496a-b2ee-00bf80fa2415\") "
Sep 30 12:38:10 crc kubenswrapper[5002]: I0930 12:38:10.033150 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fd4d1d88-c894-496a-b2ee-00bf80fa2415-logs" (OuterVolumeSpecName: "logs") pod "fd4d1d88-c894-496a-b2ee-00bf80fa2415" (UID: "fd4d1d88-c894-496a-b2ee-00bf80fa2415"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 30 12:38:10 crc kubenswrapper[5002]: I0930 12:38:10.033269 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/fd4d1d88-c894-496a-b2ee-00bf80fa2415-config-data\") pod \"fd4d1d88-c894-496a-b2ee-00bf80fa2415\" (UID: \"fd4d1d88-c894-496a-b2ee-00bf80fa2415\") "
Sep 30 12:38:10 crc kubenswrapper[5002]: I0930 12:38:10.033427 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hlzft\" (UniqueName: \"kubernetes.io/projected/fd4d1d88-c894-496a-b2ee-00bf80fa2415-kube-api-access-hlzft\") pod \"fd4d1d88-c894-496a-b2ee-00bf80fa2415\" (UID: \"fd4d1d88-c894-496a-b2ee-00bf80fa2415\") "
Sep 30 12:38:10 crc kubenswrapper[5002]: I0930 12:38:10.033465 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fd4d1d88-c894-496a-b2ee-00bf80fa2415-combined-ca-bundle\") pod \"fd4d1d88-c894-496a-b2ee-00bf80fa2415\" (UID: \"fd4d1d88-c894-496a-b2ee-00bf80fa2415\") "
Sep 30 12:38:10 crc kubenswrapper[5002]: I0930 12:38:10.035610 5002 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fd4d1d88-c894-496a-b2ee-00bf80fa2415-logs\") on node \"crc\" DevicePath \"\""
Sep 30 12:38:10 crc kubenswrapper[5002]: I0930 12:38:10.045993 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fd4d1d88-c894-496a-b2ee-00bf80fa2415-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "fd4d1d88-c894-496a-b2ee-00bf80fa2415" (UID: "fd4d1d88-c894-496a-b2ee-00bf80fa2415"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 12:38:10 crc kubenswrapper[5002]: I0930 12:38:10.054222 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fd4d1d88-c894-496a-b2ee-00bf80fa2415-kube-api-access-hlzft" (OuterVolumeSpecName: "kube-api-access-hlzft") pod "fd4d1d88-c894-496a-b2ee-00bf80fa2415" (UID: "fd4d1d88-c894-496a-b2ee-00bf80fa2415"). InnerVolumeSpecName "kube-api-access-hlzft". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 12:38:10 crc kubenswrapper[5002]: I0930 12:38:10.071698 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fd4d1d88-c894-496a-b2ee-00bf80fa2415-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "fd4d1d88-c894-496a-b2ee-00bf80fa2415" (UID: "fd4d1d88-c894-496a-b2ee-00bf80fa2415"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 12:38:10 crc kubenswrapper[5002]: I0930 12:38:10.082610 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fd4d1d88-c894-496a-b2ee-00bf80fa2415-config-data" (OuterVolumeSpecName: "config-data") pod "fd4d1d88-c894-496a-b2ee-00bf80fa2415" (UID: "fd4d1d88-c894-496a-b2ee-00bf80fa2415"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 30 12:38:10 crc kubenswrapper[5002]: I0930 12:38:10.087998 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fd4d1d88-c894-496a-b2ee-00bf80fa2415-horizon-tls-certs" (OuterVolumeSpecName: "horizon-tls-certs") pod "fd4d1d88-c894-496a-b2ee-00bf80fa2415" (UID: "fd4d1d88-c894-496a-b2ee-00bf80fa2415").
InnerVolumeSpecName "horizon-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:38:10 crc kubenswrapper[5002]: I0930 12:38:10.101433 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fd4d1d88-c894-496a-b2ee-00bf80fa2415-scripts" (OuterVolumeSpecName: "scripts") pod "fd4d1d88-c894-496a-b2ee-00bf80fa2415" (UID: "fd4d1d88-c894-496a-b2ee-00bf80fa2415"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 12:38:10 crc kubenswrapper[5002]: I0930 12:38:10.137165 5002 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/fd4d1d88-c894-496a-b2ee-00bf80fa2415-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 12:38:10 crc kubenswrapper[5002]: I0930 12:38:10.137249 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hlzft\" (UniqueName: \"kubernetes.io/projected/fd4d1d88-c894-496a-b2ee-00bf80fa2415-kube-api-access-hlzft\") on node \"crc\" DevicePath \"\"" Sep 30 12:38:10 crc kubenswrapper[5002]: I0930 12:38:10.137267 5002 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fd4d1d88-c894-496a-b2ee-00bf80fa2415-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 12:38:10 crc kubenswrapper[5002]: I0930 12:38:10.137279 5002 reconciler_common.go:293] "Volume detached for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/fd4d1d88-c894-496a-b2ee-00bf80fa2415-horizon-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 30 12:38:10 crc kubenswrapper[5002]: I0930 12:38:10.137290 5002 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/fd4d1d88-c894-496a-b2ee-00bf80fa2415-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Sep 30 12:38:10 crc kubenswrapper[5002]: I0930 12:38:10.137330 5002 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/fd4d1d88-c894-496a-b2ee-00bf80fa2415-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 12:38:10 crc kubenswrapper[5002]: I0930 12:38:10.404358 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"104a5fb7-2a8a-4274-9c0c-84539d4b5c25","Type":"ContainerStarted","Data":"6ef541b7fdb7fed57f6d0dd7f7ae86a92815309107ad79464970ab63c64aaa8c"} Sep 30 12:38:10 crc kubenswrapper[5002]: I0930 12:38:10.406127 5002 generic.go:334] "Generic (PLEG): container finished" podID="beec5517-fd48-4017-9cb3-1102001e9439" containerID="3ee31987cac31de7a53839e25d36aa1a3805641e725f6df37d71980d6c990de9" exitCode=143 Sep 30 12:38:10 crc kubenswrapper[5002]: I0930 12:38:10.406176 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-86cd8676db-mltgz" event={"ID":"beec5517-fd48-4017-9cb3-1102001e9439","Type":"ContainerDied","Data":"3ee31987cac31de7a53839e25d36aa1a3805641e725f6df37d71980d6c990de9"} Sep 30 12:38:10 crc kubenswrapper[5002]: I0930 12:38:10.407783 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-f8777b74-fpwh2" event={"ID":"fd4d1d88-c894-496a-b2ee-00bf80fa2415","Type":"ContainerDied","Data":"9f7eac6e87e340a6eae3d08f151d25141a38926b084074456b30b71482b6790a"} Sep 30 12:38:10 crc kubenswrapper[5002]: I0930 12:38:10.407812 5002 scope.go:117] "RemoveContainer" containerID="8b84e3b52cde98cea92f145d0960b70ae2e34eb1f70dee734351f11ecd31ab55" Sep 30 12:38:10 crc kubenswrapper[5002]: I0930 12:38:10.407929 5002 
util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-f8777b74-fpwh2" Sep 30 12:38:10 crc kubenswrapper[5002]: I0930 12:38:10.447811 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-f8777b74-fpwh2"] Sep 30 12:38:10 crc kubenswrapper[5002]: I0930 12:38:10.459253 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-f8777b74-fpwh2"] Sep 30 12:38:10 crc kubenswrapper[5002]: I0930 12:38:10.718405 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fd4d1d88-c894-496a-b2ee-00bf80fa2415" path="/var/lib/kubelet/pods/fd4d1d88-c894-496a-b2ee-00bf80fa2415/volumes" Sep 30 12:38:10 crc kubenswrapper[5002]: I0930 12:38:10.723424 5002 scope.go:117] "RemoveContainer" containerID="20378dc396b5e4c91b822c9380bf06eb3c519550f74b233a99bb346dd6c21296" Sep 30 12:38:11 crc kubenswrapper[5002]: I0930 12:38:11.419400 5002 generic.go:334] "Generic (PLEG): container finished" podID="4fdf0c77-68ae-41ff-b6b5-122baa461b8c" containerID="12cdb900bf0fb859526f074db970c8589cc60cb37f68824e568f580a6fdf5c63" exitCode=0 Sep 30 12:38:11 crc kubenswrapper[5002]: I0930 12:38:11.419507 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-bnf58" event={"ID":"4fdf0c77-68ae-41ff-b6b5-122baa461b8c","Type":"ContainerDied","Data":"12cdb900bf0fb859526f074db970c8589cc60cb37f68824e568f580a6fdf5c63"} Sep 30 12:38:11 crc kubenswrapper[5002]: I0930 12:38:11.714841 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-db-create-wldrd"] Sep 30 12:38:11 crc kubenswrapper[5002]: E0930 12:38:11.715627 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fd4d1d88-c894-496a-b2ee-00bf80fa2415" containerName="horizon" Sep 30 12:38:11 crc kubenswrapper[5002]: I0930 12:38:11.715653 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="fd4d1d88-c894-496a-b2ee-00bf80fa2415" containerName="horizon" Sep 30 12:38:11 crc kubenswrapper[5002]: E0930 12:38:11.715671 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fd4d1d88-c894-496a-b2ee-00bf80fa2415" containerName="horizon-log" Sep 30 12:38:11 crc kubenswrapper[5002]: I0930 12:38:11.715683 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="fd4d1d88-c894-496a-b2ee-00bf80fa2415" containerName="horizon-log" Sep 30 12:38:11 crc kubenswrapper[5002]: E0930 12:38:11.715719 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e6c79eff-4c2e-42b9-af82-df66c7091400" containerName="dnsmasq-dns" Sep 30 12:38:11 crc kubenswrapper[5002]: I0930 12:38:11.715740 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="e6c79eff-4c2e-42b9-af82-df66c7091400" containerName="dnsmasq-dns" Sep 30 12:38:11 crc kubenswrapper[5002]: E0930 12:38:11.715764 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e6c79eff-4c2e-42b9-af82-df66c7091400" containerName="init" Sep 30 12:38:11 crc kubenswrapper[5002]: I0930 12:38:11.715772 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="e6c79eff-4c2e-42b9-af82-df66c7091400" containerName="init" Sep 30 12:38:11 crc kubenswrapper[5002]: I0930 12:38:11.715992 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="e6c79eff-4c2e-42b9-af82-df66c7091400" containerName="dnsmasq-dns" Sep 30 12:38:11 crc kubenswrapper[5002]: I0930 12:38:11.716051 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="fd4d1d88-c894-496a-b2ee-00bf80fa2415" containerName="horizon-log" Sep 30 12:38:11 crc kubenswrapper[5002]: 
I0930 12:38:11.716085 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="fd4d1d88-c894-496a-b2ee-00bf80fa2415" containerName="horizon" Sep 30 12:38:11 crc kubenswrapper[5002]: I0930 12:38:11.716866 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-wldrd" Sep 30 12:38:11 crc kubenswrapper[5002]: I0930 12:38:11.730108 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-wldrd"] Sep 30 12:38:11 crc kubenswrapper[5002]: I0930 12:38:11.831915 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-db-create-rb685"] Sep 30 12:38:11 crc kubenswrapper[5002]: I0930 12:38:11.833846 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-rb685" Sep 30 12:38:11 crc kubenswrapper[5002]: I0930 12:38:11.837829 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-rb685"] Sep 30 12:38:11 crc kubenswrapper[5002]: I0930 12:38:11.872838 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zbmzc\" (UniqueName: \"kubernetes.io/projected/a20a6156-ca1d-4c5c-b98f-b4e062bf4e9c-kube-api-access-zbmzc\") pod \"nova-api-db-create-wldrd\" (UID: \"a20a6156-ca1d-4c5c-b98f-b4e062bf4e9c\") " pod="openstack/nova-api-db-create-wldrd" Sep 30 12:38:11 crc kubenswrapper[5002]: I0930 12:38:11.915713 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-db-create-6khcg"] Sep 30 12:38:11 crc kubenswrapper[5002]: I0930 12:38:11.917305 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-6khcg" Sep 30 12:38:11 crc kubenswrapper[5002]: I0930 12:38:11.924005 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-6khcg"] Sep 30 12:38:11 crc kubenswrapper[5002]: I0930 12:38:11.974400 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5vgmd\" (UniqueName: \"kubernetes.io/projected/092e2ea7-c4f1-4ba7-b9e8-064cbbfa53aa-kube-api-access-5vgmd\") pod \"nova-cell0-db-create-rb685\" (UID: \"092e2ea7-c4f1-4ba7-b9e8-064cbbfa53aa\") " pod="openstack/nova-cell0-db-create-rb685" Sep 30 12:38:11 crc kubenswrapper[5002]: I0930 12:38:11.974730 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zbmzc\" (UniqueName: \"kubernetes.io/projected/a20a6156-ca1d-4c5c-b98f-b4e062bf4e9c-kube-api-access-zbmzc\") pod \"nova-api-db-create-wldrd\" (UID: \"a20a6156-ca1d-4c5c-b98f-b4e062bf4e9c\") " pod="openstack/nova-api-db-create-wldrd" Sep 30 12:38:11 crc kubenswrapper[5002]: I0930 12:38:11.993411 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zbmzc\" (UniqueName: \"kubernetes.io/projected/a20a6156-ca1d-4c5c-b98f-b4e062bf4e9c-kube-api-access-zbmzc\") pod \"nova-api-db-create-wldrd\" (UID: \"a20a6156-ca1d-4c5c-b98f-b4e062bf4e9c\") " pod="openstack/nova-api-db-create-wldrd" Sep 30 12:38:12 crc kubenswrapper[5002]: I0930 12:38:12.035934 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-db-create-wldrd" Sep 30 12:38:12 crc kubenswrapper[5002]: I0930 12:38:12.080749 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5vgmd\" (UniqueName: \"kubernetes.io/projected/092e2ea7-c4f1-4ba7-b9e8-064cbbfa53aa-kube-api-access-5vgmd\") pod \"nova-cell0-db-create-rb685\" (UID: \"092e2ea7-c4f1-4ba7-b9e8-064cbbfa53aa\") " pod="openstack/nova-cell0-db-create-rb685" Sep 30 12:38:12 crc kubenswrapper[5002]: I0930 12:38:12.081013 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b5zmm\" (UniqueName: \"kubernetes.io/projected/93fde69b-7152-4a47-8d1d-fe6aa7376882-kube-api-access-b5zmm\") pod \"nova-cell1-db-create-6khcg\" (UID: \"93fde69b-7152-4a47-8d1d-fe6aa7376882\") " pod="openstack/nova-cell1-db-create-6khcg" Sep 30 12:38:12 crc kubenswrapper[5002]: I0930 12:38:12.102807 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5vgmd\" (UniqueName: \"kubernetes.io/projected/092e2ea7-c4f1-4ba7-b9e8-064cbbfa53aa-kube-api-access-5vgmd\") pod \"nova-cell0-db-create-rb685\" (UID: \"092e2ea7-c4f1-4ba7-b9e8-064cbbfa53aa\") " pod="openstack/nova-cell0-db-create-rb685" Sep 30 12:38:12 crc kubenswrapper[5002]: I0930 12:38:12.157306 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-rb685" Sep 30 12:38:12 crc kubenswrapper[5002]: I0930 12:38:12.182538 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b5zmm\" (UniqueName: \"kubernetes.io/projected/93fde69b-7152-4a47-8d1d-fe6aa7376882-kube-api-access-b5zmm\") pod \"nova-cell1-db-create-6khcg\" (UID: \"93fde69b-7152-4a47-8d1d-fe6aa7376882\") " pod="openstack/nova-cell1-db-create-6khcg" Sep 30 12:38:12 crc kubenswrapper[5002]: I0930 12:38:12.204156 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b5zmm\" (UniqueName: \"kubernetes.io/projected/93fde69b-7152-4a47-8d1d-fe6aa7376882-kube-api-access-b5zmm\") pod \"nova-cell1-db-create-6khcg\" (UID: \"93fde69b-7152-4a47-8d1d-fe6aa7376882\") " pod="openstack/nova-cell1-db-create-6khcg" Sep 30 12:38:12 crc kubenswrapper[5002]: I0930 12:38:12.232450 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-db-create-6khcg" Sep 30 12:38:12 crc kubenswrapper[5002]: I0930 12:38:12.446837 5002 generic.go:334] "Generic (PLEG): container finished" podID="6c398da4-8e97-4ee7-83bb-f958c41fabff" containerID="a99c08c483c48a24fd03aaf010f4c106d01c65729b9e0bebd44143567528ada9" exitCode=0 Sep 30 12:38:12 crc kubenswrapper[5002]: I0930 12:38:12.446914 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-vxpxc" event={"ID":"6c398da4-8e97-4ee7-83bb-f958c41fabff","Type":"ContainerDied","Data":"a99c08c483c48a24fd03aaf010f4c106d01c65729b9e0bebd44143567528ada9"} Sep 30 12:38:12 crc kubenswrapper[5002]: I0930 12:38:12.449933 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"104a5fb7-2a8a-4274-9c0c-84539d4b5c25","Type":"ContainerStarted","Data":"9bf12d45e3637bb9b062faba5f5b91b102a2e8227cdbbee843aa7d0917caa157"} Sep 30 12:38:12 crc kubenswrapper[5002]: I0930 12:38:12.450018 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Sep 30 12:38:12 crc kubenswrapper[5002]: I0930 12:38:12.450050 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="104a5fb7-2a8a-4274-9c0c-84539d4b5c25" containerName="sg-core" containerID="cri-o://6ef541b7fdb7fed57f6d0dd7f7ae86a92815309107ad79464970ab63c64aaa8c" gracePeriod=30 Sep 30 12:38:12 crc kubenswrapper[5002]: I0930 12:38:12.450063 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="104a5fb7-2a8a-4274-9c0c-84539d4b5c25" containerName="proxy-httpd" containerID="cri-o://9bf12d45e3637bb9b062faba5f5b91b102a2e8227cdbbee843aa7d0917caa157" gracePeriod=30 Sep 30 12:38:12 crc kubenswrapper[5002]: I0930 12:38:12.450129 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="104a5fb7-2a8a-4274-9c0c-84539d4b5c25" containerName="ceilometer-notification-agent" containerID="cri-o://bb71f6e3d773533a93b0a891ae606006d2c610a4f16dc3bca663666f79d39d21" gracePeriod=30 Sep 30 12:38:12 crc kubenswrapper[5002]: I0930 12:38:12.450297 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="104a5fb7-2a8a-4274-9c0c-84539d4b5c25" containerName="ceilometer-central-agent" containerID="cri-o://45b5be152eee9ce326702fbce8687a56b0cf91fc38c9bfa4bf289fa91859cdba" gracePeriod=30 Sep 30 12:38:12 crc kubenswrapper[5002]: I0930 12:38:12.525145 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.775577454 podStartE2EDuration="14.525122976s" podCreationTimestamp="2025-09-30 12:37:58 +0000 UTC" firstStartedPulling="2025-09-30 12:37:59.460129459 +0000 UTC m=+1053.709811605" lastFinishedPulling="2025-09-30 12:38:11.209674981 +0000 UTC m=+1065.459357127" observedRunningTime="2025-09-30 12:38:12.499290059 +0000 UTC m=+1066.748972225" watchObservedRunningTime="2025-09-30 12:38:12.525122976 +0000 UTC m=+1066.774805122" Sep 30 12:38:12 crc kubenswrapper[5002]: I0930 12:38:12.579505 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-wldrd"] Sep 30 12:38:12 crc kubenswrapper[5002]: E0930 12:38:12.721152 5002 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: 
[\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod104a5fb7_2a8a_4274_9c0c_84539d4b5c25.slice/crio-conmon-6ef541b7fdb7fed57f6d0dd7f7ae86a92815309107ad79464970ab63c64aaa8c.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod104a5fb7_2a8a_4274_9c0c_84539d4b5c25.slice/crio-9bf12d45e3637bb9b062faba5f5b91b102a2e8227cdbbee843aa7d0917caa157.scope\": RecentStats: unable to find data in memory cache]" Sep 30 12:38:12 crc kubenswrapper[5002]: I0930 12:38:12.742287 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-rb685"] Sep 30 12:38:12 crc kubenswrapper[5002]: I0930 12:38:12.778756 5002 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-86cd8676db-mltgz" podUID="beec5517-fd48-4017-9cb3-1102001e9439" containerName="barbican-api-log" probeResult="failure" output="Get \"http://10.217.0.159:9311/healthcheck\": read tcp 10.217.0.2:56026->10.217.0.159:9311: read: connection reset by peer" Sep 30 12:38:12 crc kubenswrapper[5002]: I0930 12:38:12.779079 5002 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-86cd8676db-mltgz" podUID="beec5517-fd48-4017-9cb3-1102001e9439" containerName="barbican-api" probeResult="failure" output="Get \"http://10.217.0.159:9311/healthcheck\": read tcp 10.217.0.2:56024->10.217.0.159:9311: read: connection reset by peer" Sep 30 12:38:12 crc kubenswrapper[5002]: I0930 12:38:12.867135 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-6khcg"] Sep 30 12:38:12 crc kubenswrapper[5002]: I0930 12:38:12.941754 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-bnf58" Sep 30 12:38:12 crc kubenswrapper[5002]: I0930 12:38:12.998587 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4fdf0c77-68ae-41ff-b6b5-122baa461b8c-config-data\") pod \"4fdf0c77-68ae-41ff-b6b5-122baa461b8c\" (UID: \"4fdf0c77-68ae-41ff-b6b5-122baa461b8c\") " Sep 30 12:38:12 crc kubenswrapper[5002]: I0930 12:38:12.998691 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4fdf0c77-68ae-41ff-b6b5-122baa461b8c-logs\") pod \"4fdf0c77-68ae-41ff-b6b5-122baa461b8c\" (UID: \"4fdf0c77-68ae-41ff-b6b5-122baa461b8c\") " Sep 30 12:38:12 crc kubenswrapper[5002]: I0930 12:38:12.998739 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4fdf0c77-68ae-41ff-b6b5-122baa461b8c-scripts\") pod \"4fdf0c77-68ae-41ff-b6b5-122baa461b8c\" (UID: \"4fdf0c77-68ae-41ff-b6b5-122baa461b8c\") " Sep 30 12:38:12 crc kubenswrapper[5002]: I0930 12:38:12.998784 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4fdf0c77-68ae-41ff-b6b5-122baa461b8c-combined-ca-bundle\") pod \"4fdf0c77-68ae-41ff-b6b5-122baa461b8c\" (UID: \"4fdf0c77-68ae-41ff-b6b5-122baa461b8c\") " Sep 30 12:38:12 crc kubenswrapper[5002]: I0930 12:38:12.998821 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9pmcw\" (UniqueName: \"kubernetes.io/projected/4fdf0c77-68ae-41ff-b6b5-122baa461b8c-kube-api-access-9pmcw\") pod \"4fdf0c77-68ae-41ff-b6b5-122baa461b8c\" (UID: \"4fdf0c77-68ae-41ff-b6b5-122baa461b8c\") " Sep 30 12:38:13 crc 
kubenswrapper[5002]: I0930 12:38:13.004397 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4fdf0c77-68ae-41ff-b6b5-122baa461b8c-logs" (OuterVolumeSpecName: "logs") pod "4fdf0c77-68ae-41ff-b6b5-122baa461b8c" (UID: "4fdf0c77-68ae-41ff-b6b5-122baa461b8c"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 12:38:13 crc kubenswrapper[5002]: I0930 12:38:13.006665 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4fdf0c77-68ae-41ff-b6b5-122baa461b8c-scripts" (OuterVolumeSpecName: "scripts") pod "4fdf0c77-68ae-41ff-b6b5-122baa461b8c" (UID: "4fdf0c77-68ae-41ff-b6b5-122baa461b8c"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:38:13 crc kubenswrapper[5002]: I0930 12:38:13.009295 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4fdf0c77-68ae-41ff-b6b5-122baa461b8c-kube-api-access-9pmcw" (OuterVolumeSpecName: "kube-api-access-9pmcw") pod "4fdf0c77-68ae-41ff-b6b5-122baa461b8c" (UID: "4fdf0c77-68ae-41ff-b6b5-122baa461b8c"). InnerVolumeSpecName "kube-api-access-9pmcw". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 12:38:13 crc kubenswrapper[5002]: I0930 12:38:13.079697 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4fdf0c77-68ae-41ff-b6b5-122baa461b8c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "4fdf0c77-68ae-41ff-b6b5-122baa461b8c" (UID: "4fdf0c77-68ae-41ff-b6b5-122baa461b8c"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:38:13 crc kubenswrapper[5002]: I0930 12:38:13.083301 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4fdf0c77-68ae-41ff-b6b5-122baa461b8c-config-data" (OuterVolumeSpecName: "config-data") pod "4fdf0c77-68ae-41ff-b6b5-122baa461b8c" (UID: "4fdf0c77-68ae-41ff-b6b5-122baa461b8c"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:38:13 crc kubenswrapper[5002]: I0930 12:38:13.100666 5002 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4fdf0c77-68ae-41ff-b6b5-122baa461b8c-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 12:38:13 crc kubenswrapper[5002]: I0930 12:38:13.100709 5002 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4fdf0c77-68ae-41ff-b6b5-122baa461b8c-logs\") on node \"crc\" DevicePath \"\"" Sep 30 12:38:13 crc kubenswrapper[5002]: I0930 12:38:13.100721 5002 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4fdf0c77-68ae-41ff-b6b5-122baa461b8c-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 12:38:13 crc kubenswrapper[5002]: I0930 12:38:13.100731 5002 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4fdf0c77-68ae-41ff-b6b5-122baa461b8c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 12:38:13 crc kubenswrapper[5002]: I0930 12:38:13.100747 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9pmcw\" (UniqueName: \"kubernetes.io/projected/4fdf0c77-68ae-41ff-b6b5-122baa461b8c-kube-api-access-9pmcw\") on node \"crc\" DevicePath \"\"" Sep 30 12:38:13 crc kubenswrapper[5002]: I0930 12:38:13.280431 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-86cd8676db-mltgz" Sep 30 12:38:13 crc kubenswrapper[5002]: I0930 12:38:13.404686 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/beec5517-fd48-4017-9cb3-1102001e9439-config-data-custom\") pod \"beec5517-fd48-4017-9cb3-1102001e9439\" (UID: \"beec5517-fd48-4017-9cb3-1102001e9439\") " Sep 30 12:38:13 crc kubenswrapper[5002]: I0930 12:38:13.404778 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7vn8p\" (UniqueName: \"kubernetes.io/projected/beec5517-fd48-4017-9cb3-1102001e9439-kube-api-access-7vn8p\") pod \"beec5517-fd48-4017-9cb3-1102001e9439\" (UID: \"beec5517-fd48-4017-9cb3-1102001e9439\") " Sep 30 12:38:13 crc kubenswrapper[5002]: I0930 12:38:13.404804 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/beec5517-fd48-4017-9cb3-1102001e9439-config-data\") pod \"beec5517-fd48-4017-9cb3-1102001e9439\" (UID: \"beec5517-fd48-4017-9cb3-1102001e9439\") " Sep 30 12:38:13 crc kubenswrapper[5002]: I0930 12:38:13.404988 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/beec5517-fd48-4017-9cb3-1102001e9439-combined-ca-bundle\") pod \"beec5517-fd48-4017-9cb3-1102001e9439\" (UID: \"beec5517-fd48-4017-9cb3-1102001e9439\") " Sep 30 12:38:13 crc kubenswrapper[5002]: I0930 12:38:13.405115 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/beec5517-fd48-4017-9cb3-1102001e9439-logs\") pod \"beec5517-fd48-4017-9cb3-1102001e9439\" (UID: \"beec5517-fd48-4017-9cb3-1102001e9439\") " Sep 30 12:38:13 crc kubenswrapper[5002]: I0930 12:38:13.405458 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/beec5517-fd48-4017-9cb3-1102001e9439-logs" (OuterVolumeSpecName: 
"logs") pod "beec5517-fd48-4017-9cb3-1102001e9439" (UID: "beec5517-fd48-4017-9cb3-1102001e9439"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 12:38:13 crc kubenswrapper[5002]: I0930 12:38:13.405646 5002 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/beec5517-fd48-4017-9cb3-1102001e9439-logs\") on node \"crc\" DevicePath \"\"" Sep 30 12:38:13 crc kubenswrapper[5002]: I0930 12:38:13.408842 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/beec5517-fd48-4017-9cb3-1102001e9439-kube-api-access-7vn8p" (OuterVolumeSpecName: "kube-api-access-7vn8p") pod "beec5517-fd48-4017-9cb3-1102001e9439" (UID: "beec5517-fd48-4017-9cb3-1102001e9439"). InnerVolumeSpecName "kube-api-access-7vn8p". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 12:38:13 crc kubenswrapper[5002]: I0930 12:38:13.415612 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/beec5517-fd48-4017-9cb3-1102001e9439-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "beec5517-fd48-4017-9cb3-1102001e9439" (UID: "beec5517-fd48-4017-9cb3-1102001e9439"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:38:13 crc kubenswrapper[5002]: I0930 12:38:13.458836 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/beec5517-fd48-4017-9cb3-1102001e9439-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "beec5517-fd48-4017-9cb3-1102001e9439" (UID: "beec5517-fd48-4017-9cb3-1102001e9439"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:38:13 crc kubenswrapper[5002]: I0930 12:38:13.460692 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/beec5517-fd48-4017-9cb3-1102001e9439-config-data" (OuterVolumeSpecName: "config-data") pod "beec5517-fd48-4017-9cb3-1102001e9439" (UID: "beec5517-fd48-4017-9cb3-1102001e9439"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:38:13 crc kubenswrapper[5002]: I0930 12:38:13.469191 5002 generic.go:334] "Generic (PLEG): container finished" podID="104a5fb7-2a8a-4274-9c0c-84539d4b5c25" containerID="9bf12d45e3637bb9b062faba5f5b91b102a2e8227cdbbee843aa7d0917caa157" exitCode=0 Sep 30 12:38:13 crc kubenswrapper[5002]: I0930 12:38:13.469222 5002 generic.go:334] "Generic (PLEG): container finished" podID="104a5fb7-2a8a-4274-9c0c-84539d4b5c25" containerID="6ef541b7fdb7fed57f6d0dd7f7ae86a92815309107ad79464970ab63c64aaa8c" exitCode=2 Sep 30 12:38:13 crc kubenswrapper[5002]: I0930 12:38:13.469233 5002 generic.go:334] "Generic (PLEG): container finished" podID="104a5fb7-2a8a-4274-9c0c-84539d4b5c25" containerID="bb71f6e3d773533a93b0a891ae606006d2c610a4f16dc3bca663666f79d39d21" exitCode=0 Sep 30 12:38:13 crc kubenswrapper[5002]: I0930 12:38:13.469269 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"104a5fb7-2a8a-4274-9c0c-84539d4b5c25","Type":"ContainerDied","Data":"9bf12d45e3637bb9b062faba5f5b91b102a2e8227cdbbee843aa7d0917caa157"} Sep 30 12:38:13 crc kubenswrapper[5002]: I0930 12:38:13.469292 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"104a5fb7-2a8a-4274-9c0c-84539d4b5c25","Type":"ContainerDied","Data":"6ef541b7fdb7fed57f6d0dd7f7ae86a92815309107ad79464970ab63c64aaa8c"} Sep 30 12:38:13 crc kubenswrapper[5002]: I0930 12:38:13.469303 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"104a5fb7-2a8a-4274-9c0c-84539d4b5c25","Type":"ContainerDied","Data":"bb71f6e3d773533a93b0a891ae606006d2c610a4f16dc3bca663666f79d39d21"} Sep 30 12:38:13 crc kubenswrapper[5002]: I0930 12:38:13.472154 5002 generic.go:334] "Generic (PLEG): container finished" podID="a20a6156-ca1d-4c5c-b98f-b4e062bf4e9c" containerID="cceaa8ffba75c4a886212eaae9b7cec4b36f414dcd3dec93a47c8ca5278f9e2c" exitCode=0 Sep 30 12:38:13 crc kubenswrapper[5002]: I0930 12:38:13.472349 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-wldrd" event={"ID":"a20a6156-ca1d-4c5c-b98f-b4e062bf4e9c","Type":"ContainerDied","Data":"cceaa8ffba75c4a886212eaae9b7cec4b36f414dcd3dec93a47c8ca5278f9e2c"} Sep 30 12:38:13 crc kubenswrapper[5002]: I0930 12:38:13.472492 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-wldrd" event={"ID":"a20a6156-ca1d-4c5c-b98f-b4e062bf4e9c","Type":"ContainerStarted","Data":"0f0b80a309ee68320b12139cf2ba5158bf0fd5af7a7a7fa0b57da5ec680feeff"} Sep 30 12:38:13 crc kubenswrapper[5002]: I0930 12:38:13.474901 5002 generic.go:334] "Generic (PLEG): container finished" podID="092e2ea7-c4f1-4ba7-b9e8-064cbbfa53aa" containerID="d609d8d01b5ef3799f852ae0abba88b8382e4a4f949ffd3f883324d4be043ac7" exitCode=0 Sep 30 12:38:13 crc kubenswrapper[5002]: I0930 12:38:13.475064 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-rb685" event={"ID":"092e2ea7-c4f1-4ba7-b9e8-064cbbfa53aa","Type":"ContainerDied","Data":"d609d8d01b5ef3799f852ae0abba88b8382e4a4f949ffd3f883324d4be043ac7"} Sep 30 12:38:13 crc kubenswrapper[5002]: I0930 12:38:13.475089 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-rb685" event={"ID":"092e2ea7-c4f1-4ba7-b9e8-064cbbfa53aa","Type":"ContainerStarted","Data":"c711b13901448891370eb547d8714f49380224669929b51dacac9bd96d14175a"} Sep 30 12:38:13 crc kubenswrapper[5002]: I0930 12:38:13.477999 5002 generic.go:334] 
"Generic (PLEG): container finished" podID="beec5517-fd48-4017-9cb3-1102001e9439" containerID="2ce09c6165125531983dc65ca7e3bf84ffa6db66b1a0a6cdbe41ee301e6b6e0a" exitCode=0 Sep 30 12:38:13 crc kubenswrapper[5002]: I0930 12:38:13.478143 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-86cd8676db-mltgz" event={"ID":"beec5517-fd48-4017-9cb3-1102001e9439","Type":"ContainerDied","Data":"2ce09c6165125531983dc65ca7e3bf84ffa6db66b1a0a6cdbe41ee301e6b6e0a"} Sep 30 12:38:13 crc kubenswrapper[5002]: I0930 12:38:13.478158 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-86cd8676db-mltgz" Sep 30 12:38:13 crc kubenswrapper[5002]: I0930 12:38:13.478210 5002 scope.go:117] "RemoveContainer" containerID="2ce09c6165125531983dc65ca7e3bf84ffa6db66b1a0a6cdbe41ee301e6b6e0a" Sep 30 12:38:13 crc kubenswrapper[5002]: I0930 12:38:13.478175 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-86cd8676db-mltgz" event={"ID":"beec5517-fd48-4017-9cb3-1102001e9439","Type":"ContainerDied","Data":"6704f3167390e0d9317ad30e3d1665ea25c069e3e46c633a55eb726dca823457"} Sep 30 12:38:13 crc kubenswrapper[5002]: I0930 12:38:13.482053 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-6khcg" event={"ID":"93fde69b-7152-4a47-8d1d-fe6aa7376882","Type":"ContainerStarted","Data":"99e4b3a17cc927ad3e605fd9a30bdbb8f060cee29ef555dad7ccd90f2d022022"} Sep 30 12:38:13 crc kubenswrapper[5002]: I0930 12:38:13.482087 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-6khcg" event={"ID":"93fde69b-7152-4a47-8d1d-fe6aa7376882","Type":"ContainerStarted","Data":"47bfca6a5446f2085447b2740c115f277d2a62e0f4a0b61c810b8337c39fc855"} Sep 30 12:38:13 crc kubenswrapper[5002]: I0930 12:38:13.486627 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-sync-bnf58" Sep 30 12:38:13 crc kubenswrapper[5002]: I0930 12:38:13.486848 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-bnf58" event={"ID":"4fdf0c77-68ae-41ff-b6b5-122baa461b8c","Type":"ContainerDied","Data":"12233c05675c8fbfbf484b5dec280ffde667e0ac32fa8dbd55cf0c01d16dd854"} Sep 30 12:38:13 crc kubenswrapper[5002]: I0930 12:38:13.487010 5002 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="12233c05675c8fbfbf484b5dec280ffde667e0ac32fa8dbd55cf0c01d16dd854" Sep 30 12:38:13 crc kubenswrapper[5002]: I0930 12:38:13.508930 5002 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/beec5517-fd48-4017-9cb3-1102001e9439-config-data-custom\") on node \"crc\" DevicePath \"\"" Sep 30 12:38:13 crc kubenswrapper[5002]: I0930 12:38:13.508957 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7vn8p\" (UniqueName: \"kubernetes.io/projected/beec5517-fd48-4017-9cb3-1102001e9439-kube-api-access-7vn8p\") on node \"crc\" DevicePath \"\"" Sep 30 12:38:13 crc kubenswrapper[5002]: I0930 12:38:13.508967 5002 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/beec5517-fd48-4017-9cb3-1102001e9439-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 12:38:13 crc kubenswrapper[5002]: I0930 12:38:13.508979 5002 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/beec5517-fd48-4017-9cb3-1102001e9439-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 12:38:13 crc kubenswrapper[5002]: I0930 12:38:13.561028 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-86cd8676db-mltgz"] Sep 30 12:38:13 crc kubenswrapper[5002]: I0930 12:38:13.572055 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-api-86cd8676db-mltgz"] Sep 30 12:38:13 crc kubenswrapper[5002]: I0930 12:38:13.581709 5002 scope.go:117] "RemoveContainer" containerID="3ee31987cac31de7a53839e25d36aa1a3805641e725f6df37d71980d6c990de9" Sep 30 12:38:13 crc kubenswrapper[5002]: I0930 12:38:13.617530 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-6f8969955b-64x4f"] Sep 30 12:38:13 crc kubenswrapper[5002]: E0930 12:38:13.617903 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="beec5517-fd48-4017-9cb3-1102001e9439" containerName="barbican-api-log" Sep 30 12:38:13 crc kubenswrapper[5002]: I0930 12:38:13.617921 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="beec5517-fd48-4017-9cb3-1102001e9439" containerName="barbican-api-log" Sep 30 12:38:13 crc kubenswrapper[5002]: E0930 12:38:13.617941 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="beec5517-fd48-4017-9cb3-1102001e9439" containerName="barbican-api" Sep 30 12:38:13 crc kubenswrapper[5002]: I0930 12:38:13.617946 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="beec5517-fd48-4017-9cb3-1102001e9439" containerName="barbican-api" Sep 30 12:38:13 crc kubenswrapper[5002]: E0930 12:38:13.617957 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4fdf0c77-68ae-41ff-b6b5-122baa461b8c" containerName="placement-db-sync" Sep 30 12:38:13 crc kubenswrapper[5002]: I0930 12:38:13.617963 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="4fdf0c77-68ae-41ff-b6b5-122baa461b8c" containerName="placement-db-sync" Sep 30 12:38:13 
crc kubenswrapper[5002]: I0930 12:38:13.618112 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="4fdf0c77-68ae-41ff-b6b5-122baa461b8c" containerName="placement-db-sync" Sep 30 12:38:13 crc kubenswrapper[5002]: I0930 12:38:13.618129 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="beec5517-fd48-4017-9cb3-1102001e9439" containerName="barbican-api-log" Sep 30 12:38:13 crc kubenswrapper[5002]: I0930 12:38:13.618149 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="beec5517-fd48-4017-9cb3-1102001e9439" containerName="barbican-api" Sep 30 12:38:13 crc kubenswrapper[5002]: I0930 12:38:13.618994 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-6f8969955b-64x4f" Sep 30 12:38:13 crc kubenswrapper[5002]: I0930 12:38:13.621771 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-znl4p" Sep 30 12:38:13 crc kubenswrapper[5002]: I0930 12:38:13.622067 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data" Sep 30 12:38:13 crc kubenswrapper[5002]: I0930 12:38:13.622361 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-placement-internal-svc" Sep 30 12:38:13 crc kubenswrapper[5002]: I0930 12:38:13.622445 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts" Sep 30 12:38:13 crc kubenswrapper[5002]: I0930 12:38:13.622406 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-placement-public-svc" Sep 30 12:38:13 crc kubenswrapper[5002]: I0930 12:38:13.636398 5002 scope.go:117] "RemoveContainer" containerID="2ce09c6165125531983dc65ca7e3bf84ffa6db66b1a0a6cdbe41ee301e6b6e0a" Sep 30 12:38:13 crc kubenswrapper[5002]: I0930 12:38:13.638296 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-6f8969955b-64x4f"] Sep 30 12:38:13 crc kubenswrapper[5002]: E0930 12:38:13.639755 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2ce09c6165125531983dc65ca7e3bf84ffa6db66b1a0a6cdbe41ee301e6b6e0a\": container with ID starting with 2ce09c6165125531983dc65ca7e3bf84ffa6db66b1a0a6cdbe41ee301e6b6e0a not found: ID does not exist" containerID="2ce09c6165125531983dc65ca7e3bf84ffa6db66b1a0a6cdbe41ee301e6b6e0a" Sep 30 12:38:13 crc kubenswrapper[5002]: I0930 12:38:13.639812 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2ce09c6165125531983dc65ca7e3bf84ffa6db66b1a0a6cdbe41ee301e6b6e0a"} err="failed to get container status \"2ce09c6165125531983dc65ca7e3bf84ffa6db66b1a0a6cdbe41ee301e6b6e0a\": rpc error: code = NotFound desc = could not find container \"2ce09c6165125531983dc65ca7e3bf84ffa6db66b1a0a6cdbe41ee301e6b6e0a\": container with ID starting with 2ce09c6165125531983dc65ca7e3bf84ffa6db66b1a0a6cdbe41ee301e6b6e0a not found: ID does not exist" Sep 30 12:38:13 crc kubenswrapper[5002]: I0930 12:38:13.639848 5002 scope.go:117] "RemoveContainer" containerID="3ee31987cac31de7a53839e25d36aa1a3805641e725f6df37d71980d6c990de9" Sep 30 12:38:13 crc kubenswrapper[5002]: E0930 12:38:13.640274 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3ee31987cac31de7a53839e25d36aa1a3805641e725f6df37d71980d6c990de9\": container with ID starting with 
3ee31987cac31de7a53839e25d36aa1a3805641e725f6df37d71980d6c990de9 not found: ID does not exist" containerID="3ee31987cac31de7a53839e25d36aa1a3805641e725f6df37d71980d6c990de9" Sep 30 12:38:13 crc kubenswrapper[5002]: I0930 12:38:13.640299 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3ee31987cac31de7a53839e25d36aa1a3805641e725f6df37d71980d6c990de9"} err="failed to get container status \"3ee31987cac31de7a53839e25d36aa1a3805641e725f6df37d71980d6c990de9\": rpc error: code = NotFound desc = could not find container \"3ee31987cac31de7a53839e25d36aa1a3805641e725f6df37d71980d6c990de9\": container with ID starting with 3ee31987cac31de7a53839e25d36aa1a3805641e725f6df37d71980d6c990de9 not found: ID does not exist" Sep 30 12:38:13 crc kubenswrapper[5002]: I0930 12:38:13.712809 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c0d09801-8aec-455d-8e90-cad2e5f5a04e-config-data\") pod \"placement-6f8969955b-64x4f\" (UID: \"c0d09801-8aec-455d-8e90-cad2e5f5a04e\") " pod="openstack/placement-6f8969955b-64x4f" Sep 30 12:38:13 crc kubenswrapper[5002]: I0930 12:38:13.712845 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c0d09801-8aec-455d-8e90-cad2e5f5a04e-combined-ca-bundle\") pod \"placement-6f8969955b-64x4f\" (UID: \"c0d09801-8aec-455d-8e90-cad2e5f5a04e\") " pod="openstack/placement-6f8969955b-64x4f" Sep 30 12:38:13 crc kubenswrapper[5002]: I0930 12:38:13.712877 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/c0d09801-8aec-455d-8e90-cad2e5f5a04e-internal-tls-certs\") pod \"placement-6f8969955b-64x4f\" (UID: \"c0d09801-8aec-455d-8e90-cad2e5f5a04e\") " pod="openstack/placement-6f8969955b-64x4f" Sep 30 12:38:13 crc kubenswrapper[5002]: I0930 12:38:13.712912 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/c0d09801-8aec-455d-8e90-cad2e5f5a04e-public-tls-certs\") pod \"placement-6f8969955b-64x4f\" (UID: \"c0d09801-8aec-455d-8e90-cad2e5f5a04e\") " pod="openstack/placement-6f8969955b-64x4f" Sep 30 12:38:13 crc kubenswrapper[5002]: I0930 12:38:13.712928 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gbbbs\" (UniqueName: \"kubernetes.io/projected/c0d09801-8aec-455d-8e90-cad2e5f5a04e-kube-api-access-gbbbs\") pod \"placement-6f8969955b-64x4f\" (UID: \"c0d09801-8aec-455d-8e90-cad2e5f5a04e\") " pod="openstack/placement-6f8969955b-64x4f" Sep 30 12:38:13 crc kubenswrapper[5002]: I0930 12:38:13.713071 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c0d09801-8aec-455d-8e90-cad2e5f5a04e-scripts\") pod \"placement-6f8969955b-64x4f\" (UID: \"c0d09801-8aec-455d-8e90-cad2e5f5a04e\") " pod="openstack/placement-6f8969955b-64x4f" Sep 30 12:38:13 crc kubenswrapper[5002]: I0930 12:38:13.713239 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c0d09801-8aec-455d-8e90-cad2e5f5a04e-logs\") pod \"placement-6f8969955b-64x4f\" (UID: \"c0d09801-8aec-455d-8e90-cad2e5f5a04e\") " 
pod="openstack/placement-6f8969955b-64x4f" Sep 30 12:38:13 crc kubenswrapper[5002]: I0930 12:38:13.814727 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/c0d09801-8aec-455d-8e90-cad2e5f5a04e-public-tls-certs\") pod \"placement-6f8969955b-64x4f\" (UID: \"c0d09801-8aec-455d-8e90-cad2e5f5a04e\") " pod="openstack/placement-6f8969955b-64x4f" Sep 30 12:38:13 crc kubenswrapper[5002]: I0930 12:38:13.815059 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gbbbs\" (UniqueName: \"kubernetes.io/projected/c0d09801-8aec-455d-8e90-cad2e5f5a04e-kube-api-access-gbbbs\") pod \"placement-6f8969955b-64x4f\" (UID: \"c0d09801-8aec-455d-8e90-cad2e5f5a04e\") " pod="openstack/placement-6f8969955b-64x4f" Sep 30 12:38:13 crc kubenswrapper[5002]: I0930 12:38:13.815095 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c0d09801-8aec-455d-8e90-cad2e5f5a04e-scripts\") pod \"placement-6f8969955b-64x4f\" (UID: \"c0d09801-8aec-455d-8e90-cad2e5f5a04e\") " pod="openstack/placement-6f8969955b-64x4f" Sep 30 12:38:13 crc kubenswrapper[5002]: I0930 12:38:13.815508 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c0d09801-8aec-455d-8e90-cad2e5f5a04e-logs\") pod \"placement-6f8969955b-64x4f\" (UID: \"c0d09801-8aec-455d-8e90-cad2e5f5a04e\") " pod="openstack/placement-6f8969955b-64x4f" Sep 30 12:38:13 crc kubenswrapper[5002]: I0930 12:38:13.815608 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c0d09801-8aec-455d-8e90-cad2e5f5a04e-config-data\") pod \"placement-6f8969955b-64x4f\" (UID: \"c0d09801-8aec-455d-8e90-cad2e5f5a04e\") " pod="openstack/placement-6f8969955b-64x4f" Sep 30 12:38:13 crc kubenswrapper[5002]: I0930 12:38:13.815627 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c0d09801-8aec-455d-8e90-cad2e5f5a04e-combined-ca-bundle\") pod \"placement-6f8969955b-64x4f\" (UID: \"c0d09801-8aec-455d-8e90-cad2e5f5a04e\") " pod="openstack/placement-6f8969955b-64x4f" Sep 30 12:38:13 crc kubenswrapper[5002]: I0930 12:38:13.815654 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/c0d09801-8aec-455d-8e90-cad2e5f5a04e-internal-tls-certs\") pod \"placement-6f8969955b-64x4f\" (UID: \"c0d09801-8aec-455d-8e90-cad2e5f5a04e\") " pod="openstack/placement-6f8969955b-64x4f" Sep 30 12:38:13 crc kubenswrapper[5002]: I0930 12:38:13.816407 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c0d09801-8aec-455d-8e90-cad2e5f5a04e-logs\") pod \"placement-6f8969955b-64x4f\" (UID: \"c0d09801-8aec-455d-8e90-cad2e5f5a04e\") " pod="openstack/placement-6f8969955b-64x4f" Sep 30 12:38:13 crc kubenswrapper[5002]: I0930 12:38:13.820044 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/c0d09801-8aec-455d-8e90-cad2e5f5a04e-public-tls-certs\") pod \"placement-6f8969955b-64x4f\" (UID: \"c0d09801-8aec-455d-8e90-cad2e5f5a04e\") " pod="openstack/placement-6f8969955b-64x4f" Sep 30 12:38:13 crc kubenswrapper[5002]: I0930 12:38:13.823657 5002 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/c0d09801-8aec-455d-8e90-cad2e5f5a04e-internal-tls-certs\") pod \"placement-6f8969955b-64x4f\" (UID: \"c0d09801-8aec-455d-8e90-cad2e5f5a04e\") " pod="openstack/placement-6f8969955b-64x4f" Sep 30 12:38:13 crc kubenswrapper[5002]: I0930 12:38:13.823815 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c0d09801-8aec-455d-8e90-cad2e5f5a04e-config-data\") pod \"placement-6f8969955b-64x4f\" (UID: \"c0d09801-8aec-455d-8e90-cad2e5f5a04e\") " pod="openstack/placement-6f8969955b-64x4f" Sep 30 12:38:13 crc kubenswrapper[5002]: I0930 12:38:13.823926 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c0d09801-8aec-455d-8e90-cad2e5f5a04e-combined-ca-bundle\") pod \"placement-6f8969955b-64x4f\" (UID: \"c0d09801-8aec-455d-8e90-cad2e5f5a04e\") " pod="openstack/placement-6f8969955b-64x4f" Sep 30 12:38:13 crc kubenswrapper[5002]: I0930 12:38:13.825371 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c0d09801-8aec-455d-8e90-cad2e5f5a04e-scripts\") pod \"placement-6f8969955b-64x4f\" (UID: \"c0d09801-8aec-455d-8e90-cad2e5f5a04e\") " pod="openstack/placement-6f8969955b-64x4f" Sep 30 12:38:13 crc kubenswrapper[5002]: I0930 12:38:13.829769 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-vxpxc" Sep 30 12:38:13 crc kubenswrapper[5002]: I0930 12:38:13.834847 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gbbbs\" (UniqueName: \"kubernetes.io/projected/c0d09801-8aec-455d-8e90-cad2e5f5a04e-kube-api-access-gbbbs\") pod \"placement-6f8969955b-64x4f\" (UID: \"c0d09801-8aec-455d-8e90-cad2e5f5a04e\") " pod="openstack/placement-6f8969955b-64x4f" Sep 30 12:38:13 crc kubenswrapper[5002]: I0930 12:38:13.917712 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6c398da4-8e97-4ee7-83bb-f958c41fabff-scripts\") pod \"6c398da4-8e97-4ee7-83bb-f958c41fabff\" (UID: \"6c398da4-8e97-4ee7-83bb-f958c41fabff\") " Sep 30 12:38:13 crc kubenswrapper[5002]: I0930 12:38:13.917774 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/6c398da4-8e97-4ee7-83bb-f958c41fabff-db-sync-config-data\") pod \"6c398da4-8e97-4ee7-83bb-f958c41fabff\" (UID: \"6c398da4-8e97-4ee7-83bb-f958c41fabff\") " Sep 30 12:38:13 crc kubenswrapper[5002]: I0930 12:38:13.917884 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fjgs2\" (UniqueName: \"kubernetes.io/projected/6c398da4-8e97-4ee7-83bb-f958c41fabff-kube-api-access-fjgs2\") pod \"6c398da4-8e97-4ee7-83bb-f958c41fabff\" (UID: \"6c398da4-8e97-4ee7-83bb-f958c41fabff\") " Sep 30 12:38:13 crc kubenswrapper[5002]: I0930 12:38:13.917923 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6c398da4-8e97-4ee7-83bb-f958c41fabff-config-data\") pod \"6c398da4-8e97-4ee7-83bb-f958c41fabff\" (UID: \"6c398da4-8e97-4ee7-83bb-f958c41fabff\") " Sep 30 12:38:13 crc kubenswrapper[5002]: I0930 12:38:13.917968 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" 
(UniqueName: \"kubernetes.io/secret/6c398da4-8e97-4ee7-83bb-f958c41fabff-combined-ca-bundle\") pod \"6c398da4-8e97-4ee7-83bb-f958c41fabff\" (UID: \"6c398da4-8e97-4ee7-83bb-f958c41fabff\") " Sep 30 12:38:13 crc kubenswrapper[5002]: I0930 12:38:13.917989 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/6c398da4-8e97-4ee7-83bb-f958c41fabff-etc-machine-id\") pod \"6c398da4-8e97-4ee7-83bb-f958c41fabff\" (UID: \"6c398da4-8e97-4ee7-83bb-f958c41fabff\") " Sep 30 12:38:13 crc kubenswrapper[5002]: I0930 12:38:13.918416 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/6c398da4-8e97-4ee7-83bb-f958c41fabff-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "6c398da4-8e97-4ee7-83bb-f958c41fabff" (UID: "6c398da4-8e97-4ee7-83bb-f958c41fabff"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 12:38:13 crc kubenswrapper[5002]: I0930 12:38:13.921088 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6c398da4-8e97-4ee7-83bb-f958c41fabff-scripts" (OuterVolumeSpecName: "scripts") pod "6c398da4-8e97-4ee7-83bb-f958c41fabff" (UID: "6c398da4-8e97-4ee7-83bb-f958c41fabff"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:38:13 crc kubenswrapper[5002]: I0930 12:38:13.923049 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6c398da4-8e97-4ee7-83bb-f958c41fabff-kube-api-access-fjgs2" (OuterVolumeSpecName: "kube-api-access-fjgs2") pod "6c398da4-8e97-4ee7-83bb-f958c41fabff" (UID: "6c398da4-8e97-4ee7-83bb-f958c41fabff"). InnerVolumeSpecName "kube-api-access-fjgs2". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 12:38:13 crc kubenswrapper[5002]: I0930 12:38:13.923370 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6c398da4-8e97-4ee7-83bb-f958c41fabff-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "6c398da4-8e97-4ee7-83bb-f958c41fabff" (UID: "6c398da4-8e97-4ee7-83bb-f958c41fabff"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:38:13 crc kubenswrapper[5002]: I0930 12:38:13.942719 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6c398da4-8e97-4ee7-83bb-f958c41fabff-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "6c398da4-8e97-4ee7-83bb-f958c41fabff" (UID: "6c398da4-8e97-4ee7-83bb-f958c41fabff"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:38:13 crc kubenswrapper[5002]: I0930 12:38:13.956242 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-6f8969955b-64x4f" Sep 30 12:38:13 crc kubenswrapper[5002]: I0930 12:38:13.969142 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6c398da4-8e97-4ee7-83bb-f958c41fabff-config-data" (OuterVolumeSpecName: "config-data") pod "6c398da4-8e97-4ee7-83bb-f958c41fabff" (UID: "6c398da4-8e97-4ee7-83bb-f958c41fabff"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:38:14 crc kubenswrapper[5002]: I0930 12:38:14.020625 5002 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/6c398da4-8e97-4ee7-83bb-f958c41fabff-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 12:38:14 crc kubenswrapper[5002]: I0930 12:38:14.020661 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fjgs2\" (UniqueName: \"kubernetes.io/projected/6c398da4-8e97-4ee7-83bb-f958c41fabff-kube-api-access-fjgs2\") on node \"crc\" DevicePath \"\"" Sep 30 12:38:14 crc kubenswrapper[5002]: I0930 12:38:14.020673 5002 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6c398da4-8e97-4ee7-83bb-f958c41fabff-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 12:38:14 crc kubenswrapper[5002]: I0930 12:38:14.020682 5002 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6c398da4-8e97-4ee7-83bb-f958c41fabff-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 12:38:14 crc kubenswrapper[5002]: I0930 12:38:14.020693 5002 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/6c398da4-8e97-4ee7-83bb-f958c41fabff-etc-machine-id\") on node \"crc\" DevicePath \"\"" Sep 30 12:38:14 crc kubenswrapper[5002]: I0930 12:38:14.020702 5002 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6c398da4-8e97-4ee7-83bb-f958c41fabff-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 12:38:14 crc kubenswrapper[5002]: I0930 12:38:14.248397 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Sep 30 12:38:14 crc kubenswrapper[5002]: I0930 12:38:14.325829 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/104a5fb7-2a8a-4274-9c0c-84539d4b5c25-log-httpd\") pod \"104a5fb7-2a8a-4274-9c0c-84539d4b5c25\" (UID: \"104a5fb7-2a8a-4274-9c0c-84539d4b5c25\") " Sep 30 12:38:14 crc kubenswrapper[5002]: I0930 12:38:14.325961 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/104a5fb7-2a8a-4274-9c0c-84539d4b5c25-config-data\") pod \"104a5fb7-2a8a-4274-9c0c-84539d4b5c25\" (UID: \"104a5fb7-2a8a-4274-9c0c-84539d4b5c25\") " Sep 30 12:38:14 crc kubenswrapper[5002]: I0930 12:38:14.326011 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/104a5fb7-2a8a-4274-9c0c-84539d4b5c25-scripts\") pod \"104a5fb7-2a8a-4274-9c0c-84539d4b5c25\" (UID: \"104a5fb7-2a8a-4274-9c0c-84539d4b5c25\") " Sep 30 12:38:14 crc kubenswrapper[5002]: I0930 12:38:14.326082 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/104a5fb7-2a8a-4274-9c0c-84539d4b5c25-run-httpd\") pod \"104a5fb7-2a8a-4274-9c0c-84539d4b5c25\" (UID: \"104a5fb7-2a8a-4274-9c0c-84539d4b5c25\") " Sep 30 12:38:14 crc kubenswrapper[5002]: I0930 12:38:14.326116 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/104a5fb7-2a8a-4274-9c0c-84539d4b5c25-combined-ca-bundle\") pod \"104a5fb7-2a8a-4274-9c0c-84539d4b5c25\" (UID: \"104a5fb7-2a8a-4274-9c0c-84539d4b5c25\") " Sep 30 12:38:14 crc kubenswrapper[5002]: I0930 12:38:14.326217 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z45jl\" (UniqueName: \"kubernetes.io/projected/104a5fb7-2a8a-4274-9c0c-84539d4b5c25-kube-api-access-z45jl\") pod \"104a5fb7-2a8a-4274-9c0c-84539d4b5c25\" (UID: \"104a5fb7-2a8a-4274-9c0c-84539d4b5c25\") " Sep 30 12:38:14 crc kubenswrapper[5002]: I0930 12:38:14.326309 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/104a5fb7-2a8a-4274-9c0c-84539d4b5c25-sg-core-conf-yaml\") pod \"104a5fb7-2a8a-4274-9c0c-84539d4b5c25\" (UID: \"104a5fb7-2a8a-4274-9c0c-84539d4b5c25\") " Sep 30 12:38:14 crc kubenswrapper[5002]: I0930 12:38:14.326536 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/104a5fb7-2a8a-4274-9c0c-84539d4b5c25-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "104a5fb7-2a8a-4274-9c0c-84539d4b5c25" (UID: "104a5fb7-2a8a-4274-9c0c-84539d4b5c25"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 12:38:14 crc kubenswrapper[5002]: I0930 12:38:14.327134 5002 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/104a5fb7-2a8a-4274-9c0c-84539d4b5c25-run-httpd\") on node \"crc\" DevicePath \"\"" Sep 30 12:38:14 crc kubenswrapper[5002]: I0930 12:38:14.330401 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/104a5fb7-2a8a-4274-9c0c-84539d4b5c25-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "104a5fb7-2a8a-4274-9c0c-84539d4b5c25" (UID: "104a5fb7-2a8a-4274-9c0c-84539d4b5c25"). 
InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 12:38:14 crc kubenswrapper[5002]: I0930 12:38:14.332769 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/104a5fb7-2a8a-4274-9c0c-84539d4b5c25-scripts" (OuterVolumeSpecName: "scripts") pod "104a5fb7-2a8a-4274-9c0c-84539d4b5c25" (UID: "104a5fb7-2a8a-4274-9c0c-84539d4b5c25"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:38:14 crc kubenswrapper[5002]: I0930 12:38:14.334665 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/104a5fb7-2a8a-4274-9c0c-84539d4b5c25-kube-api-access-z45jl" (OuterVolumeSpecName: "kube-api-access-z45jl") pod "104a5fb7-2a8a-4274-9c0c-84539d4b5c25" (UID: "104a5fb7-2a8a-4274-9c0c-84539d4b5c25"). InnerVolumeSpecName "kube-api-access-z45jl". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 12:38:14 crc kubenswrapper[5002]: I0930 12:38:14.372623 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/104a5fb7-2a8a-4274-9c0c-84539d4b5c25-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "104a5fb7-2a8a-4274-9c0c-84539d4b5c25" (UID: "104a5fb7-2a8a-4274-9c0c-84539d4b5c25"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:38:14 crc kubenswrapper[5002]: I0930 12:38:14.427291 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/104a5fb7-2a8a-4274-9c0c-84539d4b5c25-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "104a5fb7-2a8a-4274-9c0c-84539d4b5c25" (UID: "104a5fb7-2a8a-4274-9c0c-84539d4b5c25"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:38:14 crc kubenswrapper[5002]: I0930 12:38:14.427975 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/104a5fb7-2a8a-4274-9c0c-84539d4b5c25-combined-ca-bundle\") pod \"104a5fb7-2a8a-4274-9c0c-84539d4b5c25\" (UID: \"104a5fb7-2a8a-4274-9c0c-84539d4b5c25\") " Sep 30 12:38:14 crc kubenswrapper[5002]: W0930 12:38:14.428084 5002 empty_dir.go:500] Warning: Unmount skipped because path does not exist: /var/lib/kubelet/pods/104a5fb7-2a8a-4274-9c0c-84539d4b5c25/volumes/kubernetes.io~secret/combined-ca-bundle Sep 30 12:38:14 crc kubenswrapper[5002]: I0930 12:38:14.428108 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/104a5fb7-2a8a-4274-9c0c-84539d4b5c25-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "104a5fb7-2a8a-4274-9c0c-84539d4b5c25" (UID: "104a5fb7-2a8a-4274-9c0c-84539d4b5c25"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:38:14 crc kubenswrapper[5002]: I0930 12:38:14.428359 5002 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/104a5fb7-2a8a-4274-9c0c-84539d4b5c25-log-httpd\") on node \"crc\" DevicePath \"\"" Sep 30 12:38:14 crc kubenswrapper[5002]: I0930 12:38:14.428377 5002 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/104a5fb7-2a8a-4274-9c0c-84539d4b5c25-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 12:38:14 crc kubenswrapper[5002]: I0930 12:38:14.428386 5002 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/104a5fb7-2a8a-4274-9c0c-84539d4b5c25-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 12:38:14 crc kubenswrapper[5002]: I0930 12:38:14.428397 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z45jl\" (UniqueName: \"kubernetes.io/projected/104a5fb7-2a8a-4274-9c0c-84539d4b5c25-kube-api-access-z45jl\") on node \"crc\" DevicePath \"\"" Sep 30 12:38:14 crc kubenswrapper[5002]: I0930 12:38:14.428405 5002 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/104a5fb7-2a8a-4274-9c0c-84539d4b5c25-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Sep 30 12:38:14 crc kubenswrapper[5002]: I0930 12:38:14.459361 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/104a5fb7-2a8a-4274-9c0c-84539d4b5c25-config-data" (OuterVolumeSpecName: "config-data") pod "104a5fb7-2a8a-4274-9c0c-84539d4b5c25" (UID: "104a5fb7-2a8a-4274-9c0c-84539d4b5c25"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:38:14 crc kubenswrapper[5002]: W0930 12:38:14.509949 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc0d09801_8aec_455d_8e90_cad2e5f5a04e.slice/crio-4d2f34d12d7a60e1b7ceebb1e74c7274feb69dbb4b4eb20d148ab968c3cbf755 WatchSource:0}: Error finding container 4d2f34d12d7a60e1b7ceebb1e74c7274feb69dbb4b4eb20d148ab968c3cbf755: Status 404 returned error can't find the container with id 4d2f34d12d7a60e1b7ceebb1e74c7274feb69dbb4b4eb20d148ab968c3cbf755 Sep 30 12:38:14 crc kubenswrapper[5002]: I0930 12:38:14.510926 5002 generic.go:334] "Generic (PLEG): container finished" podID="104a5fb7-2a8a-4274-9c0c-84539d4b5c25" containerID="45b5be152eee9ce326702fbce8687a56b0cf91fc38c9bfa4bf289fa91859cdba" exitCode=0 Sep 30 12:38:14 crc kubenswrapper[5002]: I0930 12:38:14.510990 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Sep 30 12:38:14 crc kubenswrapper[5002]: I0930 12:38:14.511488 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"104a5fb7-2a8a-4274-9c0c-84539d4b5c25","Type":"ContainerDied","Data":"45b5be152eee9ce326702fbce8687a56b0cf91fc38c9bfa4bf289fa91859cdba"} Sep 30 12:38:14 crc kubenswrapper[5002]: I0930 12:38:14.511526 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"104a5fb7-2a8a-4274-9c0c-84539d4b5c25","Type":"ContainerDied","Data":"07e08889cc4dd4f33688980cb7fbbc8f3d29925062744af37083ba9c0cbe7e7c"} Sep 30 12:38:14 crc kubenswrapper[5002]: I0930 12:38:14.511545 5002 scope.go:117] "RemoveContainer" containerID="9bf12d45e3637bb9b062faba5f5b91b102a2e8227cdbbee843aa7d0917caa157" Sep 30 12:38:14 crc kubenswrapper[5002]: I0930 12:38:14.516901 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-vxpxc" event={"ID":"6c398da4-8e97-4ee7-83bb-f958c41fabff","Type":"ContainerDied","Data":"8ba4da71cb92298baa03729a78378de20cc7ecf6c833aec55776935cb5eb16c0"} Sep 30 12:38:14 crc kubenswrapper[5002]: I0930 12:38:14.516940 5002 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8ba4da71cb92298baa03729a78378de20cc7ecf6c833aec55776935cb5eb16c0" Sep 30 12:38:14 crc kubenswrapper[5002]: I0930 12:38:14.516914 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-vxpxc" Sep 30 12:38:14 crc kubenswrapper[5002]: I0930 12:38:14.517577 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-6f8969955b-64x4f"] Sep 30 12:38:14 crc kubenswrapper[5002]: I0930 12:38:14.522122 5002 generic.go:334] "Generic (PLEG): container finished" podID="93fde69b-7152-4a47-8d1d-fe6aa7376882" containerID="99e4b3a17cc927ad3e605fd9a30bdbb8f060cee29ef555dad7ccd90f2d022022" exitCode=0 Sep 30 12:38:14 crc kubenswrapper[5002]: I0930 12:38:14.522339 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-6khcg" event={"ID":"93fde69b-7152-4a47-8d1d-fe6aa7376882","Type":"ContainerDied","Data":"99e4b3a17cc927ad3e605fd9a30bdbb8f060cee29ef555dad7ccd90f2d022022"} Sep 30 12:38:14 crc kubenswrapper[5002]: I0930 12:38:14.530955 5002 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/104a5fb7-2a8a-4274-9c0c-84539d4b5c25-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 12:38:14 crc kubenswrapper[5002]: I0930 12:38:14.553076 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Sep 30 12:38:14 crc kubenswrapper[5002]: I0930 12:38:14.573674 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Sep 30 12:38:14 crc kubenswrapper[5002]: I0930 12:38:14.580770 5002 scope.go:117] "RemoveContainer" containerID="6ef541b7fdb7fed57f6d0dd7f7ae86a92815309107ad79464970ab63c64aaa8c" Sep 30 12:38:14 crc kubenswrapper[5002]: I0930 12:38:14.588138 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Sep 30 12:38:14 crc kubenswrapper[5002]: E0930 12:38:14.588636 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="104a5fb7-2a8a-4274-9c0c-84539d4b5c25" containerName="sg-core" Sep 30 12:38:14 crc kubenswrapper[5002]: I0930 12:38:14.588662 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="104a5fb7-2a8a-4274-9c0c-84539d4b5c25" containerName="sg-core" Sep 30 12:38:14 crc 
kubenswrapper[5002]: E0930 12:38:14.588696 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="104a5fb7-2a8a-4274-9c0c-84539d4b5c25" containerName="ceilometer-notification-agent" Sep 30 12:38:14 crc kubenswrapper[5002]: I0930 12:38:14.588720 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="104a5fb7-2a8a-4274-9c0c-84539d4b5c25" containerName="ceilometer-notification-agent" Sep 30 12:38:14 crc kubenswrapper[5002]: E0930 12:38:14.588737 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6c398da4-8e97-4ee7-83bb-f958c41fabff" containerName="cinder-db-sync" Sep 30 12:38:14 crc kubenswrapper[5002]: I0930 12:38:14.588746 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="6c398da4-8e97-4ee7-83bb-f958c41fabff" containerName="cinder-db-sync" Sep 30 12:38:14 crc kubenswrapper[5002]: E0930 12:38:14.588761 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="104a5fb7-2a8a-4274-9c0c-84539d4b5c25" containerName="proxy-httpd" Sep 30 12:38:14 crc kubenswrapper[5002]: I0930 12:38:14.588767 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="104a5fb7-2a8a-4274-9c0c-84539d4b5c25" containerName="proxy-httpd" Sep 30 12:38:14 crc kubenswrapper[5002]: E0930 12:38:14.588791 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="104a5fb7-2a8a-4274-9c0c-84539d4b5c25" containerName="ceilometer-central-agent" Sep 30 12:38:14 crc kubenswrapper[5002]: I0930 12:38:14.588800 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="104a5fb7-2a8a-4274-9c0c-84539d4b5c25" containerName="ceilometer-central-agent" Sep 30 12:38:14 crc kubenswrapper[5002]: I0930 12:38:14.589023 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="104a5fb7-2a8a-4274-9c0c-84539d4b5c25" containerName="ceilometer-notification-agent" Sep 30 12:38:14 crc kubenswrapper[5002]: I0930 12:38:14.589044 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="104a5fb7-2a8a-4274-9c0c-84539d4b5c25" containerName="sg-core" Sep 30 12:38:14 crc kubenswrapper[5002]: I0930 12:38:14.589058 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="6c398da4-8e97-4ee7-83bb-f958c41fabff" containerName="cinder-db-sync" Sep 30 12:38:14 crc kubenswrapper[5002]: I0930 12:38:14.589076 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="104a5fb7-2a8a-4274-9c0c-84539d4b5c25" containerName="ceilometer-central-agent" Sep 30 12:38:14 crc kubenswrapper[5002]: I0930 12:38:14.589091 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="104a5fb7-2a8a-4274-9c0c-84539d4b5c25" containerName="proxy-httpd" Sep 30 12:38:14 crc kubenswrapper[5002]: I0930 12:38:14.593333 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Sep 30 12:38:14 crc kubenswrapper[5002]: I0930 12:38:14.599838 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Sep 30 12:38:14 crc kubenswrapper[5002]: I0930 12:38:14.600190 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Sep 30 12:38:14 crc kubenswrapper[5002]: I0930 12:38:14.619554 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 30 12:38:14 crc kubenswrapper[5002]: I0930 12:38:14.632713 5002 scope.go:117] "RemoveContainer" containerID="bb71f6e3d773533a93b0a891ae606006d2c610a4f16dc3bca663666f79d39d21" Sep 30 12:38:14 crc kubenswrapper[5002]: I0930 12:38:14.691258 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="104a5fb7-2a8a-4274-9c0c-84539d4b5c25" path="/var/lib/kubelet/pods/104a5fb7-2a8a-4274-9c0c-84539d4b5c25/volumes" Sep 30 12:38:14 crc kubenswrapper[5002]: I0930 12:38:14.693213 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="beec5517-fd48-4017-9cb3-1102001e9439" path="/var/lib/kubelet/pods/beec5517-fd48-4017-9cb3-1102001e9439/volumes" Sep 30 12:38:14 crc kubenswrapper[5002]: I0930 12:38:14.698388 5002 scope.go:117] "RemoveContainer" containerID="45b5be152eee9ce326702fbce8687a56b0cf91fc38c9bfa4bf289fa91859cdba" Sep 30 12:38:14 crc kubenswrapper[5002]: I0930 12:38:14.736094 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a446bd7c-bf30-4027-b9d9-9c7087dff156-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"a446bd7c-bf30-4027-b9d9-9c7087dff156\") " pod="openstack/ceilometer-0" Sep 30 12:38:14 crc kubenswrapper[5002]: I0930 12:38:14.736141 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a446bd7c-bf30-4027-b9d9-9c7087dff156-config-data\") pod \"ceilometer-0\" (UID: \"a446bd7c-bf30-4027-b9d9-9c7087dff156\") " pod="openstack/ceilometer-0" Sep 30 12:38:14 crc kubenswrapper[5002]: I0930 12:38:14.736168 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a446bd7c-bf30-4027-b9d9-9c7087dff156-log-httpd\") pod \"ceilometer-0\" (UID: \"a446bd7c-bf30-4027-b9d9-9c7087dff156\") " pod="openstack/ceilometer-0" Sep 30 12:38:14 crc kubenswrapper[5002]: I0930 12:38:14.736229 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a446bd7c-bf30-4027-b9d9-9c7087dff156-run-httpd\") pod \"ceilometer-0\" (UID: \"a446bd7c-bf30-4027-b9d9-9c7087dff156\") " pod="openstack/ceilometer-0" Sep 30 12:38:14 crc kubenswrapper[5002]: I0930 12:38:14.736381 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a446bd7c-bf30-4027-b9d9-9c7087dff156-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"a446bd7c-bf30-4027-b9d9-9c7087dff156\") " pod="openstack/ceilometer-0" Sep 30 12:38:14 crc kubenswrapper[5002]: I0930 12:38:14.736396 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a446bd7c-bf30-4027-b9d9-9c7087dff156-scripts\") pod \"ceilometer-0\" (UID: 
\"a446bd7c-bf30-4027-b9d9-9c7087dff156\") " pod="openstack/ceilometer-0" Sep 30 12:38:14 crc kubenswrapper[5002]: I0930 12:38:14.736446 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qvdkq\" (UniqueName: \"kubernetes.io/projected/a446bd7c-bf30-4027-b9d9-9c7087dff156-kube-api-access-qvdkq\") pod \"ceilometer-0\" (UID: \"a446bd7c-bf30-4027-b9d9-9c7087dff156\") " pod="openstack/ceilometer-0" Sep 30 12:38:14 crc kubenswrapper[5002]: I0930 12:38:14.748608 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"] Sep 30 12:38:14 crc kubenswrapper[5002]: I0930 12:38:14.750611 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Sep 30 12:38:14 crc kubenswrapper[5002]: I0930 12:38:14.759546 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Sep 30 12:38:14 crc kubenswrapper[5002]: I0930 12:38:14.760752 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts" Sep 30 12:38:14 crc kubenswrapper[5002]: I0930 12:38:14.760856 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data" Sep 30 12:38:14 crc kubenswrapper[5002]: I0930 12:38:14.761158 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data" Sep 30 12:38:14 crc kubenswrapper[5002]: I0930 12:38:14.763050 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-88hs7" Sep 30 12:38:14 crc kubenswrapper[5002]: I0930 12:38:14.794323 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5784cf869f-cq2vq"] Sep 30 12:38:14 crc kubenswrapper[5002]: I0930 12:38:14.795995 5002 scope.go:117] "RemoveContainer" containerID="9bf12d45e3637bb9b062faba5f5b91b102a2e8227cdbbee843aa7d0917caa157" Sep 30 12:38:14 crc kubenswrapper[5002]: I0930 12:38:14.796029 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5784cf869f-cq2vq" Sep 30 12:38:14 crc kubenswrapper[5002]: E0930 12:38:14.806419 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9bf12d45e3637bb9b062faba5f5b91b102a2e8227cdbbee843aa7d0917caa157\": container with ID starting with 9bf12d45e3637bb9b062faba5f5b91b102a2e8227cdbbee843aa7d0917caa157 not found: ID does not exist" containerID="9bf12d45e3637bb9b062faba5f5b91b102a2e8227cdbbee843aa7d0917caa157" Sep 30 12:38:14 crc kubenswrapper[5002]: I0930 12:38:14.806458 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9bf12d45e3637bb9b062faba5f5b91b102a2e8227cdbbee843aa7d0917caa157"} err="failed to get container status \"9bf12d45e3637bb9b062faba5f5b91b102a2e8227cdbbee843aa7d0917caa157\": rpc error: code = NotFound desc = could not find container \"9bf12d45e3637bb9b062faba5f5b91b102a2e8227cdbbee843aa7d0917caa157\": container with ID starting with 9bf12d45e3637bb9b062faba5f5b91b102a2e8227cdbbee843aa7d0917caa157 not found: ID does not exist" Sep 30 12:38:14 crc kubenswrapper[5002]: I0930 12:38:14.806508 5002 scope.go:117] "RemoveContainer" containerID="6ef541b7fdb7fed57f6d0dd7f7ae86a92815309107ad79464970ab63c64aaa8c" Sep 30 12:38:14 crc kubenswrapper[5002]: I0930 12:38:14.806592 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5784cf869f-cq2vq"] Sep 30 12:38:14 crc kubenswrapper[5002]: E0930 12:38:14.810711 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6ef541b7fdb7fed57f6d0dd7f7ae86a92815309107ad79464970ab63c64aaa8c\": container with ID starting with 6ef541b7fdb7fed57f6d0dd7f7ae86a92815309107ad79464970ab63c64aaa8c not found: ID does not exist" containerID="6ef541b7fdb7fed57f6d0dd7f7ae86a92815309107ad79464970ab63c64aaa8c" Sep 30 12:38:14 crc kubenswrapper[5002]: I0930 12:38:14.810769 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6ef541b7fdb7fed57f6d0dd7f7ae86a92815309107ad79464970ab63c64aaa8c"} err="failed to get container status \"6ef541b7fdb7fed57f6d0dd7f7ae86a92815309107ad79464970ab63c64aaa8c\": rpc error: code = NotFound desc = could not find container \"6ef541b7fdb7fed57f6d0dd7f7ae86a92815309107ad79464970ab63c64aaa8c\": container with ID starting with 6ef541b7fdb7fed57f6d0dd7f7ae86a92815309107ad79464970ab63c64aaa8c not found: ID does not exist" Sep 30 12:38:14 crc kubenswrapper[5002]: I0930 12:38:14.810801 5002 scope.go:117] "RemoveContainer" containerID="bb71f6e3d773533a93b0a891ae606006d2c610a4f16dc3bca663666f79d39d21" Sep 30 12:38:14 crc kubenswrapper[5002]: E0930 12:38:14.811197 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bb71f6e3d773533a93b0a891ae606006d2c610a4f16dc3bca663666f79d39d21\": container with ID starting with bb71f6e3d773533a93b0a891ae606006d2c610a4f16dc3bca663666f79d39d21 not found: ID does not exist" containerID="bb71f6e3d773533a93b0a891ae606006d2c610a4f16dc3bca663666f79d39d21" Sep 30 12:38:14 crc kubenswrapper[5002]: I0930 12:38:14.811231 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bb71f6e3d773533a93b0a891ae606006d2c610a4f16dc3bca663666f79d39d21"} err="failed to get container status \"bb71f6e3d773533a93b0a891ae606006d2c610a4f16dc3bca663666f79d39d21\": rpc error: code = NotFound desc = could not 
find container \"bb71f6e3d773533a93b0a891ae606006d2c610a4f16dc3bca663666f79d39d21\": container with ID starting with bb71f6e3d773533a93b0a891ae606006d2c610a4f16dc3bca663666f79d39d21 not found: ID does not exist" Sep 30 12:38:14 crc kubenswrapper[5002]: I0930 12:38:14.811245 5002 scope.go:117] "RemoveContainer" containerID="45b5be152eee9ce326702fbce8687a56b0cf91fc38c9bfa4bf289fa91859cdba" Sep 30 12:38:14 crc kubenswrapper[5002]: E0930 12:38:14.811426 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"45b5be152eee9ce326702fbce8687a56b0cf91fc38c9bfa4bf289fa91859cdba\": container with ID starting with 45b5be152eee9ce326702fbce8687a56b0cf91fc38c9bfa4bf289fa91859cdba not found: ID does not exist" containerID="45b5be152eee9ce326702fbce8687a56b0cf91fc38c9bfa4bf289fa91859cdba" Sep 30 12:38:14 crc kubenswrapper[5002]: I0930 12:38:14.811447 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"45b5be152eee9ce326702fbce8687a56b0cf91fc38c9bfa4bf289fa91859cdba"} err="failed to get container status \"45b5be152eee9ce326702fbce8687a56b0cf91fc38c9bfa4bf289fa91859cdba\": rpc error: code = NotFound desc = could not find container \"45b5be152eee9ce326702fbce8687a56b0cf91fc38c9bfa4bf289fa91859cdba\": container with ID starting with 45b5be152eee9ce326702fbce8687a56b0cf91fc38c9bfa4bf289fa91859cdba not found: ID does not exist" Sep 30 12:38:14 crc kubenswrapper[5002]: I0930 12:38:14.837723 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mfdb7\" (UniqueName: \"kubernetes.io/projected/3a7935aa-3f70-4f2c-94ba-5bc122c7f028-kube-api-access-mfdb7\") pod \"cinder-scheduler-0\" (UID: \"3a7935aa-3f70-4f2c-94ba-5bc122c7f028\") " pod="openstack/cinder-scheduler-0" Sep 30 12:38:14 crc kubenswrapper[5002]: I0930 12:38:14.837793 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3a7935aa-3f70-4f2c-94ba-5bc122c7f028-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"3a7935aa-3f70-4f2c-94ba-5bc122c7f028\") " pod="openstack/cinder-scheduler-0" Sep 30 12:38:14 crc kubenswrapper[5002]: I0930 12:38:14.837837 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/fa385ab0-fe00-47cb-be3c-0f643bd2a3dc-ovsdbserver-sb\") pod \"dnsmasq-dns-5784cf869f-cq2vq\" (UID: \"fa385ab0-fe00-47cb-be3c-0f643bd2a3dc\") " pod="openstack/dnsmasq-dns-5784cf869f-cq2vq" Sep 30 12:38:14 crc kubenswrapper[5002]: I0930 12:38:14.837878 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/fa385ab0-fe00-47cb-be3c-0f643bd2a3dc-ovsdbserver-nb\") pod \"dnsmasq-dns-5784cf869f-cq2vq\" (UID: \"fa385ab0-fe00-47cb-be3c-0f643bd2a3dc\") " pod="openstack/dnsmasq-dns-5784cf869f-cq2vq" Sep 30 12:38:14 crc kubenswrapper[5002]: I0930 12:38:14.837910 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/3a7935aa-3f70-4f2c-94ba-5bc122c7f028-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"3a7935aa-3f70-4f2c-94ba-5bc122c7f028\") " pod="openstack/cinder-scheduler-0" Sep 30 12:38:14 crc kubenswrapper[5002]: I0930 12:38:14.837942 5002 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a446bd7c-bf30-4027-b9d9-9c7087dff156-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"a446bd7c-bf30-4027-b9d9-9c7087dff156\") " pod="openstack/ceilometer-0" Sep 30 12:38:14 crc kubenswrapper[5002]: I0930 12:38:14.837976 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a446bd7c-bf30-4027-b9d9-9c7087dff156-scripts\") pod \"ceilometer-0\" (UID: \"a446bd7c-bf30-4027-b9d9-9c7087dff156\") " pod="openstack/ceilometer-0" Sep 30 12:38:14 crc kubenswrapper[5002]: I0930 12:38:14.838017 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qvdkq\" (UniqueName: \"kubernetes.io/projected/a446bd7c-bf30-4027-b9d9-9c7087dff156-kube-api-access-qvdkq\") pod \"ceilometer-0\" (UID: \"a446bd7c-bf30-4027-b9d9-9c7087dff156\") " pod="openstack/ceilometer-0" Sep 30 12:38:14 crc kubenswrapper[5002]: I0930 12:38:14.838053 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/fa385ab0-fe00-47cb-be3c-0f643bd2a3dc-dns-svc\") pod \"dnsmasq-dns-5784cf869f-cq2vq\" (UID: \"fa385ab0-fe00-47cb-be3c-0f643bd2a3dc\") " pod="openstack/dnsmasq-dns-5784cf869f-cq2vq" Sep 30 12:38:14 crc kubenswrapper[5002]: I0930 12:38:14.838076 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a446bd7c-bf30-4027-b9d9-9c7087dff156-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"a446bd7c-bf30-4027-b9d9-9c7087dff156\") " pod="openstack/ceilometer-0" Sep 30 12:38:14 crc kubenswrapper[5002]: I0930 12:38:14.838097 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fa385ab0-fe00-47cb-be3c-0f643bd2a3dc-config\") pod \"dnsmasq-dns-5784cf869f-cq2vq\" (UID: \"fa385ab0-fe00-47cb-be3c-0f643bd2a3dc\") " pod="openstack/dnsmasq-dns-5784cf869f-cq2vq" Sep 30 12:38:14 crc kubenswrapper[5002]: I0930 12:38:14.838131 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a446bd7c-bf30-4027-b9d9-9c7087dff156-config-data\") pod \"ceilometer-0\" (UID: \"a446bd7c-bf30-4027-b9d9-9c7087dff156\") " pod="openstack/ceilometer-0" Sep 30 12:38:14 crc kubenswrapper[5002]: I0930 12:38:14.838154 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x59j6\" (UniqueName: \"kubernetes.io/projected/fa385ab0-fe00-47cb-be3c-0f643bd2a3dc-kube-api-access-x59j6\") pod \"dnsmasq-dns-5784cf869f-cq2vq\" (UID: \"fa385ab0-fe00-47cb-be3c-0f643bd2a3dc\") " pod="openstack/dnsmasq-dns-5784cf869f-cq2vq" Sep 30 12:38:14 crc kubenswrapper[5002]: I0930 12:38:14.838193 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a446bd7c-bf30-4027-b9d9-9c7087dff156-log-httpd\") pod \"ceilometer-0\" (UID: \"a446bd7c-bf30-4027-b9d9-9c7087dff156\") " pod="openstack/ceilometer-0" Sep 30 12:38:14 crc kubenswrapper[5002]: I0930 12:38:14.838223 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3a7935aa-3f70-4f2c-94ba-5bc122c7f028-config-data\") pod \"cinder-scheduler-0\" (UID: 
\"3a7935aa-3f70-4f2c-94ba-5bc122c7f028\") " pod="openstack/cinder-scheduler-0" Sep 30 12:38:14 crc kubenswrapper[5002]: I0930 12:38:14.838260 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/fa385ab0-fe00-47cb-be3c-0f643bd2a3dc-dns-swift-storage-0\") pod \"dnsmasq-dns-5784cf869f-cq2vq\" (UID: \"fa385ab0-fe00-47cb-be3c-0f643bd2a3dc\") " pod="openstack/dnsmasq-dns-5784cf869f-cq2vq" Sep 30 12:38:14 crc kubenswrapper[5002]: I0930 12:38:14.838300 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/3a7935aa-3f70-4f2c-94ba-5bc122c7f028-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"3a7935aa-3f70-4f2c-94ba-5bc122c7f028\") " pod="openstack/cinder-scheduler-0" Sep 30 12:38:14 crc kubenswrapper[5002]: I0930 12:38:14.838369 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a446bd7c-bf30-4027-b9d9-9c7087dff156-run-httpd\") pod \"ceilometer-0\" (UID: \"a446bd7c-bf30-4027-b9d9-9c7087dff156\") " pod="openstack/ceilometer-0" Sep 30 12:38:14 crc kubenswrapper[5002]: I0930 12:38:14.838407 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3a7935aa-3f70-4f2c-94ba-5bc122c7f028-scripts\") pod \"cinder-scheduler-0\" (UID: \"3a7935aa-3f70-4f2c-94ba-5bc122c7f028\") " pod="openstack/cinder-scheduler-0" Sep 30 12:38:14 crc kubenswrapper[5002]: I0930 12:38:14.842402 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a446bd7c-bf30-4027-b9d9-9c7087dff156-log-httpd\") pod \"ceilometer-0\" (UID: \"a446bd7c-bf30-4027-b9d9-9c7087dff156\") " pod="openstack/ceilometer-0" Sep 30 12:38:14 crc kubenswrapper[5002]: I0930 12:38:14.842945 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a446bd7c-bf30-4027-b9d9-9c7087dff156-run-httpd\") pod \"ceilometer-0\" (UID: \"a446bd7c-bf30-4027-b9d9-9c7087dff156\") " pod="openstack/ceilometer-0" Sep 30 12:38:14 crc kubenswrapper[5002]: I0930 12:38:14.846162 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a446bd7c-bf30-4027-b9d9-9c7087dff156-scripts\") pod \"ceilometer-0\" (UID: \"a446bd7c-bf30-4027-b9d9-9c7087dff156\") " pod="openstack/ceilometer-0" Sep 30 12:38:14 crc kubenswrapper[5002]: I0930 12:38:14.846902 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a446bd7c-bf30-4027-b9d9-9c7087dff156-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"a446bd7c-bf30-4027-b9d9-9c7087dff156\") " pod="openstack/ceilometer-0" Sep 30 12:38:14 crc kubenswrapper[5002]: I0930 12:38:14.848262 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a446bd7c-bf30-4027-b9d9-9c7087dff156-config-data\") pod \"ceilometer-0\" (UID: \"a446bd7c-bf30-4027-b9d9-9c7087dff156\") " pod="openstack/ceilometer-0" Sep 30 12:38:14 crc kubenswrapper[5002]: I0930 12:38:14.861750 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/a446bd7c-bf30-4027-b9d9-9c7087dff156-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"a446bd7c-bf30-4027-b9d9-9c7087dff156\") " pod="openstack/ceilometer-0" Sep 30 12:38:14 crc kubenswrapper[5002]: I0930 12:38:14.878694 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"] Sep 30 12:38:14 crc kubenswrapper[5002]: I0930 12:38:14.880048 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Sep 30 12:38:14 crc kubenswrapper[5002]: I0930 12:38:14.883101 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data" Sep 30 12:38:14 crc kubenswrapper[5002]: I0930 12:38:14.892824 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qvdkq\" (UniqueName: \"kubernetes.io/projected/a446bd7c-bf30-4027-b9d9-9c7087dff156-kube-api-access-qvdkq\") pod \"ceilometer-0\" (UID: \"a446bd7c-bf30-4027-b9d9-9c7087dff156\") " pod="openstack/ceilometer-0" Sep 30 12:38:14 crc kubenswrapper[5002]: I0930 12:38:14.898347 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Sep 30 12:38:14 crc kubenswrapper[5002]: I0930 12:38:14.930114 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 30 12:38:14 crc kubenswrapper[5002]: I0930 12:38:14.939382 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3a7935aa-3f70-4f2c-94ba-5bc122c7f028-config-data\") pod \"cinder-scheduler-0\" (UID: \"3a7935aa-3f70-4f2c-94ba-5bc122c7f028\") " pod="openstack/cinder-scheduler-0" Sep 30 12:38:14 crc kubenswrapper[5002]: I0930 12:38:14.939432 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/fa385ab0-fe00-47cb-be3c-0f643bd2a3dc-dns-swift-storage-0\") pod \"dnsmasq-dns-5784cf869f-cq2vq\" (UID: \"fa385ab0-fe00-47cb-be3c-0f643bd2a3dc\") " pod="openstack/dnsmasq-dns-5784cf869f-cq2vq" Sep 30 12:38:14 crc kubenswrapper[5002]: I0930 12:38:14.939488 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/64484577-e70a-44bd-aefc-27864dccdc4c-logs\") pod \"cinder-api-0\" (UID: \"64484577-e70a-44bd-aefc-27864dccdc4c\") " pod="openstack/cinder-api-0" Sep 30 12:38:14 crc kubenswrapper[5002]: I0930 12:38:14.939511 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/64484577-e70a-44bd-aefc-27864dccdc4c-config-data-custom\") pod \"cinder-api-0\" (UID: \"64484577-e70a-44bd-aefc-27864dccdc4c\") " pod="openstack/cinder-api-0" Sep 30 12:38:14 crc kubenswrapper[5002]: I0930 12:38:14.939525 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/64484577-e70a-44bd-aefc-27864dccdc4c-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"64484577-e70a-44bd-aefc-27864dccdc4c\") " pod="openstack/cinder-api-0" Sep 30 12:38:14 crc kubenswrapper[5002]: I0930 12:38:14.939550 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/3a7935aa-3f70-4f2c-94ba-5bc122c7f028-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"3a7935aa-3f70-4f2c-94ba-5bc122c7f028\") 
" pod="openstack/cinder-scheduler-0" Sep 30 12:38:14 crc kubenswrapper[5002]: I0930 12:38:14.939585 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3a7935aa-3f70-4f2c-94ba-5bc122c7f028-scripts\") pod \"cinder-scheduler-0\" (UID: \"3a7935aa-3f70-4f2c-94ba-5bc122c7f028\") " pod="openstack/cinder-scheduler-0" Sep 30 12:38:14 crc kubenswrapper[5002]: I0930 12:38:14.939604 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/64484577-e70a-44bd-aefc-27864dccdc4c-config-data\") pod \"cinder-api-0\" (UID: \"64484577-e70a-44bd-aefc-27864dccdc4c\") " pod="openstack/cinder-api-0" Sep 30 12:38:14 crc kubenswrapper[5002]: I0930 12:38:14.939738 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mfdb7\" (UniqueName: \"kubernetes.io/projected/3a7935aa-3f70-4f2c-94ba-5bc122c7f028-kube-api-access-mfdb7\") pod \"cinder-scheduler-0\" (UID: \"3a7935aa-3f70-4f2c-94ba-5bc122c7f028\") " pod="openstack/cinder-scheduler-0" Sep 30 12:38:14 crc kubenswrapper[5002]: I0930 12:38:14.939769 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3a7935aa-3f70-4f2c-94ba-5bc122c7f028-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"3a7935aa-3f70-4f2c-94ba-5bc122c7f028\") " pod="openstack/cinder-scheduler-0" Sep 30 12:38:14 crc kubenswrapper[5002]: I0930 12:38:14.939799 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/64484577-e70a-44bd-aefc-27864dccdc4c-etc-machine-id\") pod \"cinder-api-0\" (UID: \"64484577-e70a-44bd-aefc-27864dccdc4c\") " pod="openstack/cinder-api-0" Sep 30 12:38:14 crc kubenswrapper[5002]: I0930 12:38:14.939828 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/fa385ab0-fe00-47cb-be3c-0f643bd2a3dc-ovsdbserver-sb\") pod \"dnsmasq-dns-5784cf869f-cq2vq\" (UID: \"fa385ab0-fe00-47cb-be3c-0f643bd2a3dc\") " pod="openstack/dnsmasq-dns-5784cf869f-cq2vq" Sep 30 12:38:14 crc kubenswrapper[5002]: I0930 12:38:14.939846 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b6q47\" (UniqueName: \"kubernetes.io/projected/64484577-e70a-44bd-aefc-27864dccdc4c-kube-api-access-b6q47\") pod \"cinder-api-0\" (UID: \"64484577-e70a-44bd-aefc-27864dccdc4c\") " pod="openstack/cinder-api-0" Sep 30 12:38:14 crc kubenswrapper[5002]: I0930 12:38:14.939864 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/fa385ab0-fe00-47cb-be3c-0f643bd2a3dc-ovsdbserver-nb\") pod \"dnsmasq-dns-5784cf869f-cq2vq\" (UID: \"fa385ab0-fe00-47cb-be3c-0f643bd2a3dc\") " pod="openstack/dnsmasq-dns-5784cf869f-cq2vq" Sep 30 12:38:14 crc kubenswrapper[5002]: I0930 12:38:14.939882 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/3a7935aa-3f70-4f2c-94ba-5bc122c7f028-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"3a7935aa-3f70-4f2c-94ba-5bc122c7f028\") " pod="openstack/cinder-scheduler-0" Sep 30 12:38:14 crc kubenswrapper[5002]: I0930 12:38:14.939926 5002 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/fa385ab0-fe00-47cb-be3c-0f643bd2a3dc-dns-svc\") pod \"dnsmasq-dns-5784cf869f-cq2vq\" (UID: \"fa385ab0-fe00-47cb-be3c-0f643bd2a3dc\") " pod="openstack/dnsmasq-dns-5784cf869f-cq2vq" Sep 30 12:38:14 crc kubenswrapper[5002]: I0930 12:38:14.939946 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fa385ab0-fe00-47cb-be3c-0f643bd2a3dc-config\") pod \"dnsmasq-dns-5784cf869f-cq2vq\" (UID: \"fa385ab0-fe00-47cb-be3c-0f643bd2a3dc\") " pod="openstack/dnsmasq-dns-5784cf869f-cq2vq" Sep 30 12:38:14 crc kubenswrapper[5002]: I0930 12:38:14.939973 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x59j6\" (UniqueName: \"kubernetes.io/projected/fa385ab0-fe00-47cb-be3c-0f643bd2a3dc-kube-api-access-x59j6\") pod \"dnsmasq-dns-5784cf869f-cq2vq\" (UID: \"fa385ab0-fe00-47cb-be3c-0f643bd2a3dc\") " pod="openstack/dnsmasq-dns-5784cf869f-cq2vq" Sep 30 12:38:14 crc kubenswrapper[5002]: I0930 12:38:14.939989 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/64484577-e70a-44bd-aefc-27864dccdc4c-scripts\") pod \"cinder-api-0\" (UID: \"64484577-e70a-44bd-aefc-27864dccdc4c\") " pod="openstack/cinder-api-0" Sep 30 12:38:14 crc kubenswrapper[5002]: I0930 12:38:14.943189 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/fa385ab0-fe00-47cb-be3c-0f643bd2a3dc-dns-swift-storage-0\") pod \"dnsmasq-dns-5784cf869f-cq2vq\" (UID: \"fa385ab0-fe00-47cb-be3c-0f643bd2a3dc\") " pod="openstack/dnsmasq-dns-5784cf869f-cq2vq" Sep 30 12:38:14 crc kubenswrapper[5002]: I0930 12:38:14.948227 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/3a7935aa-3f70-4f2c-94ba-5bc122c7f028-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"3a7935aa-3f70-4f2c-94ba-5bc122c7f028\") " pod="openstack/cinder-scheduler-0" Sep 30 12:38:14 crc kubenswrapper[5002]: I0930 12:38:14.948819 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3a7935aa-3f70-4f2c-94ba-5bc122c7f028-config-data\") pod \"cinder-scheduler-0\" (UID: \"3a7935aa-3f70-4f2c-94ba-5bc122c7f028\") " pod="openstack/cinder-scheduler-0" Sep 30 12:38:14 crc kubenswrapper[5002]: I0930 12:38:14.949002 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/fa385ab0-fe00-47cb-be3c-0f643bd2a3dc-ovsdbserver-nb\") pod \"dnsmasq-dns-5784cf869f-cq2vq\" (UID: \"fa385ab0-fe00-47cb-be3c-0f643bd2a3dc\") " pod="openstack/dnsmasq-dns-5784cf869f-cq2vq" Sep 30 12:38:14 crc kubenswrapper[5002]: I0930 12:38:14.949334 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/3a7935aa-3f70-4f2c-94ba-5bc122c7f028-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"3a7935aa-3f70-4f2c-94ba-5bc122c7f028\") " pod="openstack/cinder-scheduler-0" Sep 30 12:38:14 crc kubenswrapper[5002]: I0930 12:38:14.949675 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fa385ab0-fe00-47cb-be3c-0f643bd2a3dc-config\") pod \"dnsmasq-dns-5784cf869f-cq2vq\" (UID: \"fa385ab0-fe00-47cb-be3c-0f643bd2a3dc\") 
" pod="openstack/dnsmasq-dns-5784cf869f-cq2vq" Sep 30 12:38:14 crc kubenswrapper[5002]: I0930 12:38:14.949947 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/fa385ab0-fe00-47cb-be3c-0f643bd2a3dc-dns-svc\") pod \"dnsmasq-dns-5784cf869f-cq2vq\" (UID: \"fa385ab0-fe00-47cb-be3c-0f643bd2a3dc\") " pod="openstack/dnsmasq-dns-5784cf869f-cq2vq" Sep 30 12:38:14 crc kubenswrapper[5002]: I0930 12:38:14.955714 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/fa385ab0-fe00-47cb-be3c-0f643bd2a3dc-ovsdbserver-sb\") pod \"dnsmasq-dns-5784cf869f-cq2vq\" (UID: \"fa385ab0-fe00-47cb-be3c-0f643bd2a3dc\") " pod="openstack/dnsmasq-dns-5784cf869f-cq2vq" Sep 30 12:38:14 crc kubenswrapper[5002]: I0930 12:38:14.959037 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3a7935aa-3f70-4f2c-94ba-5bc122c7f028-scripts\") pod \"cinder-scheduler-0\" (UID: \"3a7935aa-3f70-4f2c-94ba-5bc122c7f028\") " pod="openstack/cinder-scheduler-0" Sep 30 12:38:14 crc kubenswrapper[5002]: I0930 12:38:14.959084 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3a7935aa-3f70-4f2c-94ba-5bc122c7f028-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"3a7935aa-3f70-4f2c-94ba-5bc122c7f028\") " pod="openstack/cinder-scheduler-0" Sep 30 12:38:14 crc kubenswrapper[5002]: I0930 12:38:14.968741 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mfdb7\" (UniqueName: \"kubernetes.io/projected/3a7935aa-3f70-4f2c-94ba-5bc122c7f028-kube-api-access-mfdb7\") pod \"cinder-scheduler-0\" (UID: \"3a7935aa-3f70-4f2c-94ba-5bc122c7f028\") " pod="openstack/cinder-scheduler-0" Sep 30 12:38:14 crc kubenswrapper[5002]: I0930 12:38:14.972913 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x59j6\" (UniqueName: \"kubernetes.io/projected/fa385ab0-fe00-47cb-be3c-0f643bd2a3dc-kube-api-access-x59j6\") pod \"dnsmasq-dns-5784cf869f-cq2vq\" (UID: \"fa385ab0-fe00-47cb-be3c-0f643bd2a3dc\") " pod="openstack/dnsmasq-dns-5784cf869f-cq2vq" Sep 30 12:38:15 crc kubenswrapper[5002]: I0930 12:38:15.042435 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/64484577-e70a-44bd-aefc-27864dccdc4c-config-data\") pod \"cinder-api-0\" (UID: \"64484577-e70a-44bd-aefc-27864dccdc4c\") " pod="openstack/cinder-api-0" Sep 30 12:38:15 crc kubenswrapper[5002]: I0930 12:38:15.042865 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/64484577-e70a-44bd-aefc-27864dccdc4c-etc-machine-id\") pod \"cinder-api-0\" (UID: \"64484577-e70a-44bd-aefc-27864dccdc4c\") " pod="openstack/cinder-api-0" Sep 30 12:38:15 crc kubenswrapper[5002]: I0930 12:38:15.042911 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b6q47\" (UniqueName: \"kubernetes.io/projected/64484577-e70a-44bd-aefc-27864dccdc4c-kube-api-access-b6q47\") pod \"cinder-api-0\" (UID: \"64484577-e70a-44bd-aefc-27864dccdc4c\") " pod="openstack/cinder-api-0" Sep 30 12:38:15 crc kubenswrapper[5002]: I0930 12:38:15.042985 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/64484577-e70a-44bd-aefc-27864dccdc4c-scripts\") pod \"cinder-api-0\" (UID: \"64484577-e70a-44bd-aefc-27864dccdc4c\") " pod="openstack/cinder-api-0" Sep 30 12:38:15 crc kubenswrapper[5002]: I0930 12:38:15.043031 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/64484577-e70a-44bd-aefc-27864dccdc4c-logs\") pod \"cinder-api-0\" (UID: \"64484577-e70a-44bd-aefc-27864dccdc4c\") " pod="openstack/cinder-api-0" Sep 30 12:38:15 crc kubenswrapper[5002]: I0930 12:38:15.043047 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/64484577-e70a-44bd-aefc-27864dccdc4c-config-data-custom\") pod \"cinder-api-0\" (UID: \"64484577-e70a-44bd-aefc-27864dccdc4c\") " pod="openstack/cinder-api-0" Sep 30 12:38:15 crc kubenswrapper[5002]: I0930 12:38:15.043061 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/64484577-e70a-44bd-aefc-27864dccdc4c-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"64484577-e70a-44bd-aefc-27864dccdc4c\") " pod="openstack/cinder-api-0" Sep 30 12:38:15 crc kubenswrapper[5002]: I0930 12:38:15.044515 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/64484577-e70a-44bd-aefc-27864dccdc4c-logs\") pod \"cinder-api-0\" (UID: \"64484577-e70a-44bd-aefc-27864dccdc4c\") " pod="openstack/cinder-api-0" Sep 30 12:38:15 crc kubenswrapper[5002]: I0930 12:38:15.053032 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/64484577-e70a-44bd-aefc-27864dccdc4c-config-data\") pod \"cinder-api-0\" (UID: \"64484577-e70a-44bd-aefc-27864dccdc4c\") " pod="openstack/cinder-api-0" Sep 30 12:38:15 crc kubenswrapper[5002]: I0930 12:38:15.053117 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/64484577-e70a-44bd-aefc-27864dccdc4c-etc-machine-id\") pod \"cinder-api-0\" (UID: \"64484577-e70a-44bd-aefc-27864dccdc4c\") " pod="openstack/cinder-api-0" Sep 30 12:38:15 crc kubenswrapper[5002]: I0930 12:38:15.062191 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/64484577-e70a-44bd-aefc-27864dccdc4c-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"64484577-e70a-44bd-aefc-27864dccdc4c\") " pod="openstack/cinder-api-0" Sep 30 12:38:15 crc kubenswrapper[5002]: I0930 12:38:15.062824 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/64484577-e70a-44bd-aefc-27864dccdc4c-config-data-custom\") pod \"cinder-api-0\" (UID: \"64484577-e70a-44bd-aefc-27864dccdc4c\") " pod="openstack/cinder-api-0" Sep 30 12:38:15 crc kubenswrapper[5002]: I0930 12:38:15.062978 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/64484577-e70a-44bd-aefc-27864dccdc4c-scripts\") pod \"cinder-api-0\" (UID: \"64484577-e70a-44bd-aefc-27864dccdc4c\") " pod="openstack/cinder-api-0" Sep 30 12:38:15 crc kubenswrapper[5002]: I0930 12:38:15.084238 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b6q47\" (UniqueName: \"kubernetes.io/projected/64484577-e70a-44bd-aefc-27864dccdc4c-kube-api-access-b6q47\") 
pod \"cinder-api-0\" (UID: \"64484577-e70a-44bd-aefc-27864dccdc4c\") " pod="openstack/cinder-api-0" Sep 30 12:38:15 crc kubenswrapper[5002]: I0930 12:38:15.099855 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Sep 30 12:38:15 crc kubenswrapper[5002]: I0930 12:38:15.164106 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5784cf869f-cq2vq" Sep 30 12:38:15 crc kubenswrapper[5002]: I0930 12:38:15.213754 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Sep 30 12:38:15 crc kubenswrapper[5002]: I0930 12:38:15.386889 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-rb685" Sep 30 12:38:15 crc kubenswrapper[5002]: I0930 12:38:15.450691 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5vgmd\" (UniqueName: \"kubernetes.io/projected/092e2ea7-c4f1-4ba7-b9e8-064cbbfa53aa-kube-api-access-5vgmd\") pod \"092e2ea7-c4f1-4ba7-b9e8-064cbbfa53aa\" (UID: \"092e2ea7-c4f1-4ba7-b9e8-064cbbfa53aa\") " Sep 30 12:38:15 crc kubenswrapper[5002]: I0930 12:38:15.471138 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-wldrd" Sep 30 12:38:15 crc kubenswrapper[5002]: I0930 12:38:15.474786 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/092e2ea7-c4f1-4ba7-b9e8-064cbbfa53aa-kube-api-access-5vgmd" (OuterVolumeSpecName: "kube-api-access-5vgmd") pod "092e2ea7-c4f1-4ba7-b9e8-064cbbfa53aa" (UID: "092e2ea7-c4f1-4ba7-b9e8-064cbbfa53aa"). InnerVolumeSpecName "kube-api-access-5vgmd". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 12:38:15 crc kubenswrapper[5002]: I0930 12:38:15.493787 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-6khcg" Sep 30 12:38:15 crc kubenswrapper[5002]: I0930 12:38:15.552571 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zbmzc\" (UniqueName: \"kubernetes.io/projected/a20a6156-ca1d-4c5c-b98f-b4e062bf4e9c-kube-api-access-zbmzc\") pod \"a20a6156-ca1d-4c5c-b98f-b4e062bf4e9c\" (UID: \"a20a6156-ca1d-4c5c-b98f-b4e062bf4e9c\") " Sep 30 12:38:15 crc kubenswrapper[5002]: I0930 12:38:15.552612 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-b5zmm\" (UniqueName: \"kubernetes.io/projected/93fde69b-7152-4a47-8d1d-fe6aa7376882-kube-api-access-b5zmm\") pod \"93fde69b-7152-4a47-8d1d-fe6aa7376882\" (UID: \"93fde69b-7152-4a47-8d1d-fe6aa7376882\") " Sep 30 12:38:15 crc kubenswrapper[5002]: I0930 12:38:15.552974 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5vgmd\" (UniqueName: \"kubernetes.io/projected/092e2ea7-c4f1-4ba7-b9e8-064cbbfa53aa-kube-api-access-5vgmd\") on node \"crc\" DevicePath \"\"" Sep 30 12:38:15 crc kubenswrapper[5002]: I0930 12:38:15.557366 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/93fde69b-7152-4a47-8d1d-fe6aa7376882-kube-api-access-b5zmm" (OuterVolumeSpecName: "kube-api-access-b5zmm") pod "93fde69b-7152-4a47-8d1d-fe6aa7376882" (UID: "93fde69b-7152-4a47-8d1d-fe6aa7376882"). InnerVolumeSpecName "kube-api-access-b5zmm". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 12:38:15 crc kubenswrapper[5002]: I0930 12:38:15.576708 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a20a6156-ca1d-4c5c-b98f-b4e062bf4e9c-kube-api-access-zbmzc" (OuterVolumeSpecName: "kube-api-access-zbmzc") pod "a20a6156-ca1d-4c5c-b98f-b4e062bf4e9c" (UID: "a20a6156-ca1d-4c5c-b98f-b4e062bf4e9c"). InnerVolumeSpecName "kube-api-access-zbmzc". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 12:38:15 crc kubenswrapper[5002]: I0930 12:38:15.602370 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 30 12:38:15 crc kubenswrapper[5002]: I0930 12:38:15.604689 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-6khcg" event={"ID":"93fde69b-7152-4a47-8d1d-fe6aa7376882","Type":"ContainerDied","Data":"47bfca6a5446f2085447b2740c115f277d2a62e0f4a0b61c810b8337c39fc855"} Sep 30 12:38:15 crc kubenswrapper[5002]: I0930 12:38:15.604726 5002 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="47bfca6a5446f2085447b2740c115f277d2a62e0f4a0b61c810b8337c39fc855" Sep 30 12:38:15 crc kubenswrapper[5002]: I0930 12:38:15.604787 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-6khcg" Sep 30 12:38:15 crc kubenswrapper[5002]: I0930 12:38:15.646651 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-6f8969955b-64x4f" event={"ID":"c0d09801-8aec-455d-8e90-cad2e5f5a04e","Type":"ContainerStarted","Data":"951c072b63606261184c1400abbc43a4bf3fd1437c3ded273c8907728d6b0da2"} Sep 30 12:38:15 crc kubenswrapper[5002]: I0930 12:38:15.646691 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-6f8969955b-64x4f" event={"ID":"c0d09801-8aec-455d-8e90-cad2e5f5a04e","Type":"ContainerStarted","Data":"4d2f34d12d7a60e1b7ceebb1e74c7274feb69dbb4b4eb20d148ab968c3cbf755"} Sep 30 12:38:15 crc kubenswrapper[5002]: I0930 12:38:15.655270 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zbmzc\" (UniqueName: \"kubernetes.io/projected/a20a6156-ca1d-4c5c-b98f-b4e062bf4e9c-kube-api-access-zbmzc\") on node \"crc\" DevicePath \"\"" Sep 30 12:38:15 crc kubenswrapper[5002]: I0930 12:38:15.655294 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-b5zmm\" (UniqueName: \"kubernetes.io/projected/93fde69b-7152-4a47-8d1d-fe6aa7376882-kube-api-access-b5zmm\") on node \"crc\" DevicePath \"\"" Sep 30 12:38:15 crc kubenswrapper[5002]: I0930 12:38:15.691768 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-wldrd" event={"ID":"a20a6156-ca1d-4c5c-b98f-b4e062bf4e9c","Type":"ContainerDied","Data":"0f0b80a309ee68320b12139cf2ba5158bf0fd5af7a7a7fa0b57da5ec680feeff"} Sep 30 12:38:15 crc kubenswrapper[5002]: I0930 12:38:15.691985 5002 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0f0b80a309ee68320b12139cf2ba5158bf0fd5af7a7a7fa0b57da5ec680feeff" Sep 30 12:38:15 crc kubenswrapper[5002]: I0930 12:38:15.692132 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-db-create-wldrd" Sep 30 12:38:15 crc kubenswrapper[5002]: I0930 12:38:15.724285 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-rb685" event={"ID":"092e2ea7-c4f1-4ba7-b9e8-064cbbfa53aa","Type":"ContainerDied","Data":"c711b13901448891370eb547d8714f49380224669929b51dacac9bd96d14175a"} Sep 30 12:38:15 crc kubenswrapper[5002]: I0930 12:38:15.724315 5002 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c711b13901448891370eb547d8714f49380224669929b51dacac9bd96d14175a" Sep 30 12:38:15 crc kubenswrapper[5002]: I0930 12:38:15.724368 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-rb685" Sep 30 12:38:15 crc kubenswrapper[5002]: I0930 12:38:15.960234 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Sep 30 12:38:16 crc kubenswrapper[5002]: W0930 12:38:16.058108 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podfa385ab0_fe00_47cb_be3c_0f643bd2a3dc.slice/crio-cd867b89bdb5d3a3ce9100106a5f48b103ebb9c853b56d5c2ec9d6b0a42d562c WatchSource:0}: Error finding container cd867b89bdb5d3a3ce9100106a5f48b103ebb9c853b56d5c2ec9d6b0a42d562c: Status 404 returned error can't find the container with id cd867b89bdb5d3a3ce9100106a5f48b103ebb9c853b56d5c2ec9d6b0a42d562c Sep 30 12:38:16 crc kubenswrapper[5002]: I0930 12:38:16.065309 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5784cf869f-cq2vq"] Sep 30 12:38:16 crc kubenswrapper[5002]: I0930 12:38:16.076721 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Sep 30 12:38:18 crc kubenswrapper[5002]: I0930 12:38:16.750643 5002 generic.go:334] "Generic (PLEG): container finished" podID="fa385ab0-fe00-47cb-be3c-0f643bd2a3dc" containerID="1dbc3dc2eec7aad68d60b3e1467b5c442a47d143f4db5f09972403738b2479ac" exitCode=0 Sep 30 12:38:18 crc kubenswrapper[5002]: I0930 12:38:16.751210 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5784cf869f-cq2vq" event={"ID":"fa385ab0-fe00-47cb-be3c-0f643bd2a3dc","Type":"ContainerDied","Data":"1dbc3dc2eec7aad68d60b3e1467b5c442a47d143f4db5f09972403738b2479ac"} Sep 30 12:38:18 crc kubenswrapper[5002]: I0930 12:38:16.751274 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5784cf869f-cq2vq" event={"ID":"fa385ab0-fe00-47cb-be3c-0f643bd2a3dc","Type":"ContainerStarted","Data":"cd867b89bdb5d3a3ce9100106a5f48b103ebb9c853b56d5c2ec9d6b0a42d562c"} Sep 30 12:38:18 crc kubenswrapper[5002]: I0930 12:38:16.757022 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a446bd7c-bf30-4027-b9d9-9c7087dff156","Type":"ContainerStarted","Data":"8f5325aab6e4fa4343a483237b57a41a4f3025769f3bf19b8f0b56156e4b5d9c"} Sep 30 12:38:18 crc kubenswrapper[5002]: I0930 12:38:16.757053 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a446bd7c-bf30-4027-b9d9-9c7087dff156","Type":"ContainerStarted","Data":"e40a34f5a65b956efb645cbb07c4e1a560b7f1c6cdea79270016821cbb221783"} Sep 30 12:38:18 crc kubenswrapper[5002]: I0930 12:38:16.758428 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-6f8969955b-64x4f" 
event={"ID":"c0d09801-8aec-455d-8e90-cad2e5f5a04e","Type":"ContainerStarted","Data":"17a65f58147cc81800967eb3fe81d512f6c1ae32a964e842025af03c04839a68"} Sep 30 12:38:18 crc kubenswrapper[5002]: I0930 12:38:16.758603 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-6f8969955b-64x4f" Sep 30 12:38:18 crc kubenswrapper[5002]: I0930 12:38:16.759126 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-6f8969955b-64x4f" Sep 30 12:38:18 crc kubenswrapper[5002]: I0930 12:38:16.759732 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"64484577-e70a-44bd-aefc-27864dccdc4c","Type":"ContainerStarted","Data":"9b0ce4bdfe7214ae5c1ba034bbeca1feba0ee723c2f6ce678267ac59f5a45485"} Sep 30 12:38:18 crc kubenswrapper[5002]: I0930 12:38:16.760437 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"3a7935aa-3f70-4f2c-94ba-5bc122c7f028","Type":"ContainerStarted","Data":"abfc820957916df1b7df28480afdd8bdfb1518f68af932246ed74e63441cc87a"} Sep 30 12:38:18 crc kubenswrapper[5002]: I0930 12:38:16.838779 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-6f8969955b-64x4f" podStartSLOduration=3.838730129 podStartE2EDuration="3.838730129s" podCreationTimestamp="2025-09-30 12:38:13 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 12:38:16.834164093 +0000 UTC m=+1071.083846249" watchObservedRunningTime="2025-09-30 12:38:16.838730129 +0000 UTC m=+1071.088412295" Sep 30 12:38:18 crc kubenswrapper[5002]: I0930 12:38:17.334584 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Sep 30 12:38:18 crc kubenswrapper[5002]: I0930 12:38:17.771822 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5784cf869f-cq2vq" event={"ID":"fa385ab0-fe00-47cb-be3c-0f643bd2a3dc","Type":"ContainerStarted","Data":"3ae6bbb38d68098ce60c1159e19db2d9a15965fc0ea0cda89f3f0e7806e19466"} Sep 30 12:38:18 crc kubenswrapper[5002]: I0930 12:38:17.773296 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5784cf869f-cq2vq" Sep 30 12:38:18 crc kubenswrapper[5002]: I0930 12:38:17.776265 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"64484577-e70a-44bd-aefc-27864dccdc4c","Type":"ContainerStarted","Data":"0a5cef2b9dc98f83c2fcd30293e079f3bb2d78c757f79a1176d08f7b732a8954"} Sep 30 12:38:18 crc kubenswrapper[5002]: I0930 12:38:17.801176 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5784cf869f-cq2vq" podStartSLOduration=3.801158473 podStartE2EDuration="3.801158473s" podCreationTimestamp="2025-09-30 12:38:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 12:38:17.798549966 +0000 UTC m=+1072.048232122" watchObservedRunningTime="2025-09-30 12:38:17.801158473 +0000 UTC m=+1072.050840619" Sep 30 12:38:18 crc kubenswrapper[5002]: I0930 12:38:18.795991 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a446bd7c-bf30-4027-b9d9-9c7087dff156","Type":"ContainerStarted","Data":"34e4053fbd3e8b9d086d98db68f823df332fd0c31283a755880f78d523756a4c"} Sep 30 12:38:18 crc kubenswrapper[5002]: I0930 12:38:18.803796 5002 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"64484577-e70a-44bd-aefc-27864dccdc4c","Type":"ContainerStarted","Data":"bfe2e1dabf4ca2f0bdd71b59609667981049f0776424a0356b28b4a2d97b1869"} Sep 30 12:38:18 crc kubenswrapper[5002]: I0930 12:38:18.803856 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="64484577-e70a-44bd-aefc-27864dccdc4c" containerName="cinder-api-log" containerID="cri-o://0a5cef2b9dc98f83c2fcd30293e079f3bb2d78c757f79a1176d08f7b732a8954" gracePeriod=30 Sep 30 12:38:18 crc kubenswrapper[5002]: I0930 12:38:18.803951 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="64484577-e70a-44bd-aefc-27864dccdc4c" containerName="cinder-api" containerID="cri-o://bfe2e1dabf4ca2f0bdd71b59609667981049f0776424a0356b28b4a2d97b1869" gracePeriod=30 Sep 30 12:38:18 crc kubenswrapper[5002]: I0930 12:38:18.804041 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0" Sep 30 12:38:18 crc kubenswrapper[5002]: I0930 12:38:18.810372 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"3a7935aa-3f70-4f2c-94ba-5bc122c7f028","Type":"ContainerStarted","Data":"6ea4672c16802a15d413efd5848fef1f96bf45563ce6122edb91280564f54666"} Sep 30 12:38:18 crc kubenswrapper[5002]: I0930 12:38:18.840055 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=4.840030494 podStartE2EDuration="4.840030494s" podCreationTimestamp="2025-09-30 12:38:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 12:38:18.828204693 +0000 UTC m=+1073.077886849" watchObservedRunningTime="2025-09-30 12:38:18.840030494 +0000 UTC m=+1073.089712660" Sep 30 12:38:19 crc kubenswrapper[5002]: I0930 12:38:19.405831 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Sep 30 12:38:19 crc kubenswrapper[5002]: I0930 12:38:19.470605 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/64484577-e70a-44bd-aefc-27864dccdc4c-logs\") pod \"64484577-e70a-44bd-aefc-27864dccdc4c\" (UID: \"64484577-e70a-44bd-aefc-27864dccdc4c\") " Sep 30 12:38:19 crc kubenswrapper[5002]: I0930 12:38:19.470769 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/64484577-e70a-44bd-aefc-27864dccdc4c-config-data-custom\") pod \"64484577-e70a-44bd-aefc-27864dccdc4c\" (UID: \"64484577-e70a-44bd-aefc-27864dccdc4c\") " Sep 30 12:38:19 crc kubenswrapper[5002]: I0930 12:38:19.470791 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/64484577-e70a-44bd-aefc-27864dccdc4c-config-data\") pod \"64484577-e70a-44bd-aefc-27864dccdc4c\" (UID: \"64484577-e70a-44bd-aefc-27864dccdc4c\") " Sep 30 12:38:19 crc kubenswrapper[5002]: I0930 12:38:19.470854 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/64484577-e70a-44bd-aefc-27864dccdc4c-etc-machine-id\") pod \"64484577-e70a-44bd-aefc-27864dccdc4c\" (UID: \"64484577-e70a-44bd-aefc-27864dccdc4c\") " Sep 30 12:38:19 crc kubenswrapper[5002]: I0930 12:38:19.470887 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-b6q47\" (UniqueName: \"kubernetes.io/projected/64484577-e70a-44bd-aefc-27864dccdc4c-kube-api-access-b6q47\") pod \"64484577-e70a-44bd-aefc-27864dccdc4c\" (UID: \"64484577-e70a-44bd-aefc-27864dccdc4c\") " Sep 30 12:38:19 crc kubenswrapper[5002]: I0930 12:38:19.470906 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/64484577-e70a-44bd-aefc-27864dccdc4c-scripts\") pod \"64484577-e70a-44bd-aefc-27864dccdc4c\" (UID: \"64484577-e70a-44bd-aefc-27864dccdc4c\") " Sep 30 12:38:19 crc kubenswrapper[5002]: I0930 12:38:19.470973 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/64484577-e70a-44bd-aefc-27864dccdc4c-combined-ca-bundle\") pod \"64484577-e70a-44bd-aefc-27864dccdc4c\" (UID: \"64484577-e70a-44bd-aefc-27864dccdc4c\") " Sep 30 12:38:19 crc kubenswrapper[5002]: I0930 12:38:19.472240 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/64484577-e70a-44bd-aefc-27864dccdc4c-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "64484577-e70a-44bd-aefc-27864dccdc4c" (UID: "64484577-e70a-44bd-aefc-27864dccdc4c"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 12:38:19 crc kubenswrapper[5002]: I0930 12:38:19.472612 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/64484577-e70a-44bd-aefc-27864dccdc4c-logs" (OuterVolumeSpecName: "logs") pod "64484577-e70a-44bd-aefc-27864dccdc4c" (UID: "64484577-e70a-44bd-aefc-27864dccdc4c"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 12:38:19 crc kubenswrapper[5002]: I0930 12:38:19.476919 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/64484577-e70a-44bd-aefc-27864dccdc4c-scripts" (OuterVolumeSpecName: "scripts") pod "64484577-e70a-44bd-aefc-27864dccdc4c" (UID: "64484577-e70a-44bd-aefc-27864dccdc4c"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:38:19 crc kubenswrapper[5002]: I0930 12:38:19.477563 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/64484577-e70a-44bd-aefc-27864dccdc4c-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "64484577-e70a-44bd-aefc-27864dccdc4c" (UID: "64484577-e70a-44bd-aefc-27864dccdc4c"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:38:19 crc kubenswrapper[5002]: I0930 12:38:19.477634 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/64484577-e70a-44bd-aefc-27864dccdc4c-kube-api-access-b6q47" (OuterVolumeSpecName: "kube-api-access-b6q47") pod "64484577-e70a-44bd-aefc-27864dccdc4c" (UID: "64484577-e70a-44bd-aefc-27864dccdc4c"). InnerVolumeSpecName "kube-api-access-b6q47". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 12:38:19 crc kubenswrapper[5002]: I0930 12:38:19.498331 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/64484577-e70a-44bd-aefc-27864dccdc4c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "64484577-e70a-44bd-aefc-27864dccdc4c" (UID: "64484577-e70a-44bd-aefc-27864dccdc4c"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:38:19 crc kubenswrapper[5002]: I0930 12:38:19.536531 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/64484577-e70a-44bd-aefc-27864dccdc4c-config-data" (OuterVolumeSpecName: "config-data") pod "64484577-e70a-44bd-aefc-27864dccdc4c" (UID: "64484577-e70a-44bd-aefc-27864dccdc4c"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:38:19 crc kubenswrapper[5002]: I0930 12:38:19.573389 5002 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/64484577-e70a-44bd-aefc-27864dccdc4c-config-data-custom\") on node \"crc\" DevicePath \"\"" Sep 30 12:38:19 crc kubenswrapper[5002]: I0930 12:38:19.573422 5002 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/64484577-e70a-44bd-aefc-27864dccdc4c-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 12:38:19 crc kubenswrapper[5002]: I0930 12:38:19.573431 5002 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/64484577-e70a-44bd-aefc-27864dccdc4c-etc-machine-id\") on node \"crc\" DevicePath \"\"" Sep 30 12:38:19 crc kubenswrapper[5002]: I0930 12:38:19.573441 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-b6q47\" (UniqueName: \"kubernetes.io/projected/64484577-e70a-44bd-aefc-27864dccdc4c-kube-api-access-b6q47\") on node \"crc\" DevicePath \"\"" Sep 30 12:38:19 crc kubenswrapper[5002]: I0930 12:38:19.573451 5002 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/64484577-e70a-44bd-aefc-27864dccdc4c-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 12:38:19 crc kubenswrapper[5002]: I0930 12:38:19.573459 5002 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/64484577-e70a-44bd-aefc-27864dccdc4c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 12:38:19 crc kubenswrapper[5002]: I0930 12:38:19.573502 5002 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/64484577-e70a-44bd-aefc-27864dccdc4c-logs\") on node \"crc\" DevicePath \"\"" Sep 30 12:38:19 crc kubenswrapper[5002]: I0930 12:38:19.822923 5002 generic.go:334] "Generic (PLEG): container finished" podID="64484577-e70a-44bd-aefc-27864dccdc4c" containerID="bfe2e1dabf4ca2f0bdd71b59609667981049f0776424a0356b28b4a2d97b1869" exitCode=0 Sep 30 12:38:19 crc kubenswrapper[5002]: I0930 12:38:19.822983 5002 generic.go:334] "Generic (PLEG): container finished" podID="64484577-e70a-44bd-aefc-27864dccdc4c" containerID="0a5cef2b9dc98f83c2fcd30293e079f3bb2d78c757f79a1176d08f7b732a8954" exitCode=143 Sep 30 12:38:19 crc kubenswrapper[5002]: I0930 12:38:19.823038 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"64484577-e70a-44bd-aefc-27864dccdc4c","Type":"ContainerDied","Data":"bfe2e1dabf4ca2f0bdd71b59609667981049f0776424a0356b28b4a2d97b1869"} Sep 30 12:38:19 crc kubenswrapper[5002]: I0930 12:38:19.823102 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"64484577-e70a-44bd-aefc-27864dccdc4c","Type":"ContainerDied","Data":"0a5cef2b9dc98f83c2fcd30293e079f3bb2d78c757f79a1176d08f7b732a8954"} Sep 30 12:38:19 crc kubenswrapper[5002]: I0930 12:38:19.823117 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"64484577-e70a-44bd-aefc-27864dccdc4c","Type":"ContainerDied","Data":"9b0ce4bdfe7214ae5c1ba034bbeca1feba0ee723c2f6ce678267ac59f5a45485"} Sep 30 12:38:19 crc kubenswrapper[5002]: I0930 12:38:19.823166 5002 scope.go:117] "RemoveContainer" containerID="bfe2e1dabf4ca2f0bdd71b59609667981049f0776424a0356b28b4a2d97b1869" Sep 30 12:38:19 crc kubenswrapper[5002]: I0930 12:38:19.823465 
5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Sep 30 12:38:19 crc kubenswrapper[5002]: I0930 12:38:19.825330 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"3a7935aa-3f70-4f2c-94ba-5bc122c7f028","Type":"ContainerStarted","Data":"32bc3ef204bd519ca25bb5737cf54719111d9cd5f520797285287a8325913608"} Sep 30 12:38:19 crc kubenswrapper[5002]: I0930 12:38:19.828129 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a446bd7c-bf30-4027-b9d9-9c7087dff156","Type":"ContainerStarted","Data":"61f2ba70d12da07efab04d7b5795905b7b1e32bbd84eccb5ae1c512962be419b"} Sep 30 12:38:19 crc kubenswrapper[5002]: I0930 12:38:19.872277 5002 scope.go:117] "RemoveContainer" containerID="0a5cef2b9dc98f83c2fcd30293e079f3bb2d78c757f79a1176d08f7b732a8954" Sep 30 12:38:19 crc kubenswrapper[5002]: I0930 12:38:19.890186 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=4.283461407 podStartE2EDuration="5.890167392s" podCreationTimestamp="2025-09-30 12:38:14 +0000 UTC" firstStartedPulling="2025-09-30 12:38:15.974041844 +0000 UTC m=+1070.223724000" lastFinishedPulling="2025-09-30 12:38:17.580747839 +0000 UTC m=+1071.830429985" observedRunningTime="2025-09-30 12:38:19.8653296 +0000 UTC m=+1074.115011756" watchObservedRunningTime="2025-09-30 12:38:19.890167392 +0000 UTC m=+1074.139849538" Sep 30 12:38:19 crc kubenswrapper[5002]: I0930 12:38:19.891534 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Sep 30 12:38:19 crc kubenswrapper[5002]: I0930 12:38:19.891855 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="5cb9bff6-f879-4cb0-87ca-9a1879f8c516" containerName="glance-log" containerID="cri-o://3df199381973c968502aa99e0c1d592eb10b7346112a85f99ec1f4bd5c28c8c7" gracePeriod=30 Sep 30 12:38:19 crc kubenswrapper[5002]: I0930 12:38:19.892376 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="5cb9bff6-f879-4cb0-87ca-9a1879f8c516" containerName="glance-httpd" containerID="cri-o://c35f92f62bacb2046bb207b031c37b392d4486f818a61d44e1501770052babf0" gracePeriod=30 Sep 30 12:38:19 crc kubenswrapper[5002]: I0930 12:38:19.912580 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Sep 30 12:38:19 crc kubenswrapper[5002]: I0930 12:38:19.923171 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-api-0"] Sep 30 12:38:19 crc kubenswrapper[5002]: I0930 12:38:19.926792 5002 scope.go:117] "RemoveContainer" containerID="bfe2e1dabf4ca2f0bdd71b59609667981049f0776424a0356b28b4a2d97b1869" Sep 30 12:38:19 crc kubenswrapper[5002]: E0930 12:38:19.927344 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bfe2e1dabf4ca2f0bdd71b59609667981049f0776424a0356b28b4a2d97b1869\": container with ID starting with bfe2e1dabf4ca2f0bdd71b59609667981049f0776424a0356b28b4a2d97b1869 not found: ID does not exist" containerID="bfe2e1dabf4ca2f0bdd71b59609667981049f0776424a0356b28b4a2d97b1869" Sep 30 12:38:19 crc kubenswrapper[5002]: I0930 12:38:19.927380 5002 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"bfe2e1dabf4ca2f0bdd71b59609667981049f0776424a0356b28b4a2d97b1869"} err="failed to get container status \"bfe2e1dabf4ca2f0bdd71b59609667981049f0776424a0356b28b4a2d97b1869\": rpc error: code = NotFound desc = could not find container \"bfe2e1dabf4ca2f0bdd71b59609667981049f0776424a0356b28b4a2d97b1869\": container with ID starting with bfe2e1dabf4ca2f0bdd71b59609667981049f0776424a0356b28b4a2d97b1869 not found: ID does not exist" Sep 30 12:38:19 crc kubenswrapper[5002]: I0930 12:38:19.927403 5002 scope.go:117] "RemoveContainer" containerID="0a5cef2b9dc98f83c2fcd30293e079f3bb2d78c757f79a1176d08f7b732a8954" Sep 30 12:38:19 crc kubenswrapper[5002]: E0930 12:38:19.930740 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0a5cef2b9dc98f83c2fcd30293e079f3bb2d78c757f79a1176d08f7b732a8954\": container with ID starting with 0a5cef2b9dc98f83c2fcd30293e079f3bb2d78c757f79a1176d08f7b732a8954 not found: ID does not exist" containerID="0a5cef2b9dc98f83c2fcd30293e079f3bb2d78c757f79a1176d08f7b732a8954" Sep 30 12:38:19 crc kubenswrapper[5002]: I0930 12:38:19.930855 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0a5cef2b9dc98f83c2fcd30293e079f3bb2d78c757f79a1176d08f7b732a8954"} err="failed to get container status \"0a5cef2b9dc98f83c2fcd30293e079f3bb2d78c757f79a1176d08f7b732a8954\": rpc error: code = NotFound desc = could not find container \"0a5cef2b9dc98f83c2fcd30293e079f3bb2d78c757f79a1176d08f7b732a8954\": container with ID starting with 0a5cef2b9dc98f83c2fcd30293e079f3bb2d78c757f79a1176d08f7b732a8954 not found: ID does not exist" Sep 30 12:38:19 crc kubenswrapper[5002]: I0930 12:38:19.930964 5002 scope.go:117] "RemoveContainer" containerID="bfe2e1dabf4ca2f0bdd71b59609667981049f0776424a0356b28b4a2d97b1869" Sep 30 12:38:19 crc kubenswrapper[5002]: I0930 12:38:19.971400 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bfe2e1dabf4ca2f0bdd71b59609667981049f0776424a0356b28b4a2d97b1869"} err="failed to get container status \"bfe2e1dabf4ca2f0bdd71b59609667981049f0776424a0356b28b4a2d97b1869\": rpc error: code = NotFound desc = could not find container \"bfe2e1dabf4ca2f0bdd71b59609667981049f0776424a0356b28b4a2d97b1869\": container with ID starting with bfe2e1dabf4ca2f0bdd71b59609667981049f0776424a0356b28b4a2d97b1869 not found: ID does not exist" Sep 30 12:38:19 crc kubenswrapper[5002]: I0930 12:38:19.971450 5002 scope.go:117] "RemoveContainer" containerID="0a5cef2b9dc98f83c2fcd30293e079f3bb2d78c757f79a1176d08f7b732a8954" Sep 30 12:38:19 crc kubenswrapper[5002]: I0930 12:38:19.972165 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0a5cef2b9dc98f83c2fcd30293e079f3bb2d78c757f79a1176d08f7b732a8954"} err="failed to get container status \"0a5cef2b9dc98f83c2fcd30293e079f3bb2d78c757f79a1176d08f7b732a8954\": rpc error: code = NotFound desc = could not find container \"0a5cef2b9dc98f83c2fcd30293e079f3bb2d78c757f79a1176d08f7b732a8954\": container with ID starting with 0a5cef2b9dc98f83c2fcd30293e079f3bb2d78c757f79a1176d08f7b732a8954 not found: ID does not exist" Sep 30 12:38:19 crc kubenswrapper[5002]: I0930 12:38:19.982037 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"] Sep 30 12:38:19 crc kubenswrapper[5002]: E0930 12:38:19.983070 5002 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="64484577-e70a-44bd-aefc-27864dccdc4c" containerName="cinder-api-log" Sep 30 12:38:19 crc kubenswrapper[5002]: I0930 12:38:19.983105 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="64484577-e70a-44bd-aefc-27864dccdc4c" containerName="cinder-api-log" Sep 30 12:38:19 crc kubenswrapper[5002]: E0930 12:38:19.983142 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="93fde69b-7152-4a47-8d1d-fe6aa7376882" containerName="mariadb-database-create" Sep 30 12:38:19 crc kubenswrapper[5002]: I0930 12:38:19.983152 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="93fde69b-7152-4a47-8d1d-fe6aa7376882" containerName="mariadb-database-create" Sep 30 12:38:19 crc kubenswrapper[5002]: E0930 12:38:19.983205 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="64484577-e70a-44bd-aefc-27864dccdc4c" containerName="cinder-api" Sep 30 12:38:19 crc kubenswrapper[5002]: I0930 12:38:19.983215 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="64484577-e70a-44bd-aefc-27864dccdc4c" containerName="cinder-api" Sep 30 12:38:19 crc kubenswrapper[5002]: E0930 12:38:19.983241 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="092e2ea7-c4f1-4ba7-b9e8-064cbbfa53aa" containerName="mariadb-database-create" Sep 30 12:38:19 crc kubenswrapper[5002]: I0930 12:38:19.983249 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="092e2ea7-c4f1-4ba7-b9e8-064cbbfa53aa" containerName="mariadb-database-create" Sep 30 12:38:19 crc kubenswrapper[5002]: E0930 12:38:19.983269 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a20a6156-ca1d-4c5c-b98f-b4e062bf4e9c" containerName="mariadb-database-create" Sep 30 12:38:19 crc kubenswrapper[5002]: I0930 12:38:19.983277 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="a20a6156-ca1d-4c5c-b98f-b4e062bf4e9c" containerName="mariadb-database-create" Sep 30 12:38:19 crc kubenswrapper[5002]: I0930 12:38:19.983876 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="a20a6156-ca1d-4c5c-b98f-b4e062bf4e9c" containerName="mariadb-database-create" Sep 30 12:38:19 crc kubenswrapper[5002]: I0930 12:38:19.983897 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="64484577-e70a-44bd-aefc-27864dccdc4c" containerName="cinder-api-log" Sep 30 12:38:19 crc kubenswrapper[5002]: I0930 12:38:19.983930 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="93fde69b-7152-4a47-8d1d-fe6aa7376882" containerName="mariadb-database-create" Sep 30 12:38:19 crc kubenswrapper[5002]: I0930 12:38:19.983963 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="092e2ea7-c4f1-4ba7-b9e8-064cbbfa53aa" containerName="mariadb-database-create" Sep 30 12:38:19 crc kubenswrapper[5002]: I0930 12:38:19.983979 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="64484577-e70a-44bd-aefc-27864dccdc4c" containerName="cinder-api" Sep 30 12:38:19 crc kubenswrapper[5002]: I0930 12:38:19.985989 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Sep 30 12:38:19 crc kubenswrapper[5002]: I0930 12:38:19.992274 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cinder-internal-svc" Sep 30 12:38:19 crc kubenswrapper[5002]: I0930 12:38:19.992954 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data" Sep 30 12:38:19 crc kubenswrapper[5002]: I0930 12:38:19.992981 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cinder-public-svc" Sep 30 12:38:20 crc kubenswrapper[5002]: I0930 12:38:20.005745 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Sep 30 12:38:20 crc kubenswrapper[5002]: I0930 12:38:20.087962 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/f71d6351-0ed1-4047-bffa-cc1020e38ecf-public-tls-certs\") pod \"cinder-api-0\" (UID: \"f71d6351-0ed1-4047-bffa-cc1020e38ecf\") " pod="openstack/cinder-api-0" Sep 30 12:38:20 crc kubenswrapper[5002]: I0930 12:38:20.088032 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f71d6351-0ed1-4047-bffa-cc1020e38ecf-config-data-custom\") pod \"cinder-api-0\" (UID: \"f71d6351-0ed1-4047-bffa-cc1020e38ecf\") " pod="openstack/cinder-api-0" Sep 30 12:38:20 crc kubenswrapper[5002]: I0930 12:38:20.088138 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/f71d6351-0ed1-4047-bffa-cc1020e38ecf-etc-machine-id\") pod \"cinder-api-0\" (UID: \"f71d6351-0ed1-4047-bffa-cc1020e38ecf\") " pod="openstack/cinder-api-0" Sep 30 12:38:20 crc kubenswrapper[5002]: I0930 12:38:20.088163 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/f71d6351-0ed1-4047-bffa-cc1020e38ecf-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"f71d6351-0ed1-4047-bffa-cc1020e38ecf\") " pod="openstack/cinder-api-0" Sep 30 12:38:20 crc kubenswrapper[5002]: I0930 12:38:20.088206 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f71d6351-0ed1-4047-bffa-cc1020e38ecf-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"f71d6351-0ed1-4047-bffa-cc1020e38ecf\") " pod="openstack/cinder-api-0" Sep 30 12:38:20 crc kubenswrapper[5002]: I0930 12:38:20.088231 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f71d6351-0ed1-4047-bffa-cc1020e38ecf-config-data\") pod \"cinder-api-0\" (UID: \"f71d6351-0ed1-4047-bffa-cc1020e38ecf\") " pod="openstack/cinder-api-0" Sep 30 12:38:20 crc kubenswrapper[5002]: I0930 12:38:20.088277 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f71d6351-0ed1-4047-bffa-cc1020e38ecf-scripts\") pod \"cinder-api-0\" (UID: \"f71d6351-0ed1-4047-bffa-cc1020e38ecf\") " pod="openstack/cinder-api-0" Sep 30 12:38:20 crc kubenswrapper[5002]: I0930 12:38:20.088303 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zkncj\" (UniqueName: 
\"kubernetes.io/projected/f71d6351-0ed1-4047-bffa-cc1020e38ecf-kube-api-access-zkncj\") pod \"cinder-api-0\" (UID: \"f71d6351-0ed1-4047-bffa-cc1020e38ecf\") " pod="openstack/cinder-api-0" Sep 30 12:38:20 crc kubenswrapper[5002]: I0930 12:38:20.088385 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f71d6351-0ed1-4047-bffa-cc1020e38ecf-logs\") pod \"cinder-api-0\" (UID: \"f71d6351-0ed1-4047-bffa-cc1020e38ecf\") " pod="openstack/cinder-api-0" Sep 30 12:38:20 crc kubenswrapper[5002]: I0930 12:38:20.103624 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0" Sep 30 12:38:20 crc kubenswrapper[5002]: I0930 12:38:20.189717 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/f71d6351-0ed1-4047-bffa-cc1020e38ecf-etc-machine-id\") pod \"cinder-api-0\" (UID: \"f71d6351-0ed1-4047-bffa-cc1020e38ecf\") " pod="openstack/cinder-api-0" Sep 30 12:38:20 crc kubenswrapper[5002]: I0930 12:38:20.189770 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/f71d6351-0ed1-4047-bffa-cc1020e38ecf-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"f71d6351-0ed1-4047-bffa-cc1020e38ecf\") " pod="openstack/cinder-api-0" Sep 30 12:38:20 crc kubenswrapper[5002]: I0930 12:38:20.189834 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f71d6351-0ed1-4047-bffa-cc1020e38ecf-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"f71d6351-0ed1-4047-bffa-cc1020e38ecf\") " pod="openstack/cinder-api-0" Sep 30 12:38:20 crc kubenswrapper[5002]: I0930 12:38:20.189856 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f71d6351-0ed1-4047-bffa-cc1020e38ecf-config-data\") pod \"cinder-api-0\" (UID: \"f71d6351-0ed1-4047-bffa-cc1020e38ecf\") " pod="openstack/cinder-api-0" Sep 30 12:38:20 crc kubenswrapper[5002]: I0930 12:38:20.189907 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f71d6351-0ed1-4047-bffa-cc1020e38ecf-scripts\") pod \"cinder-api-0\" (UID: \"f71d6351-0ed1-4047-bffa-cc1020e38ecf\") " pod="openstack/cinder-api-0" Sep 30 12:38:20 crc kubenswrapper[5002]: I0930 12:38:20.189932 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zkncj\" (UniqueName: \"kubernetes.io/projected/f71d6351-0ed1-4047-bffa-cc1020e38ecf-kube-api-access-zkncj\") pod \"cinder-api-0\" (UID: \"f71d6351-0ed1-4047-bffa-cc1020e38ecf\") " pod="openstack/cinder-api-0" Sep 30 12:38:20 crc kubenswrapper[5002]: I0930 12:38:20.189988 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f71d6351-0ed1-4047-bffa-cc1020e38ecf-logs\") pod \"cinder-api-0\" (UID: \"f71d6351-0ed1-4047-bffa-cc1020e38ecf\") " pod="openstack/cinder-api-0" Sep 30 12:38:20 crc kubenswrapper[5002]: I0930 12:38:20.190039 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/f71d6351-0ed1-4047-bffa-cc1020e38ecf-public-tls-certs\") pod \"cinder-api-0\" (UID: \"f71d6351-0ed1-4047-bffa-cc1020e38ecf\") " pod="openstack/cinder-api-0" 
Sep 30 12:38:20 crc kubenswrapper[5002]: I0930 12:38:20.190082 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f71d6351-0ed1-4047-bffa-cc1020e38ecf-config-data-custom\") pod \"cinder-api-0\" (UID: \"f71d6351-0ed1-4047-bffa-cc1020e38ecf\") " pod="openstack/cinder-api-0"
Sep 30 12:38:20 crc kubenswrapper[5002]: I0930 12:38:20.190806 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/f71d6351-0ed1-4047-bffa-cc1020e38ecf-etc-machine-id\") pod \"cinder-api-0\" (UID: \"f71d6351-0ed1-4047-bffa-cc1020e38ecf\") " pod="openstack/cinder-api-0"
Sep 30 12:38:20 crc kubenswrapper[5002]: I0930 12:38:20.191150 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f71d6351-0ed1-4047-bffa-cc1020e38ecf-logs\") pod \"cinder-api-0\" (UID: \"f71d6351-0ed1-4047-bffa-cc1020e38ecf\") " pod="openstack/cinder-api-0"
Sep 30 12:38:20 crc kubenswrapper[5002]: I0930 12:38:20.207040 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/f71d6351-0ed1-4047-bffa-cc1020e38ecf-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"f71d6351-0ed1-4047-bffa-cc1020e38ecf\") " pod="openstack/cinder-api-0"
Sep 30 12:38:20 crc kubenswrapper[5002]: I0930 12:38:20.209611 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f71d6351-0ed1-4047-bffa-cc1020e38ecf-config-data\") pod \"cinder-api-0\" (UID: \"f71d6351-0ed1-4047-bffa-cc1020e38ecf\") " pod="openstack/cinder-api-0"
Sep 30 12:38:20 crc kubenswrapper[5002]: I0930 12:38:20.209854 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f71d6351-0ed1-4047-bffa-cc1020e38ecf-scripts\") pod \"cinder-api-0\" (UID: \"f71d6351-0ed1-4047-bffa-cc1020e38ecf\") " pod="openstack/cinder-api-0"
Sep 30 12:38:20 crc kubenswrapper[5002]: I0930 12:38:20.212330 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f71d6351-0ed1-4047-bffa-cc1020e38ecf-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"f71d6351-0ed1-4047-bffa-cc1020e38ecf\") " pod="openstack/cinder-api-0"
Sep 30 12:38:20 crc kubenswrapper[5002]: I0930 12:38:20.213309 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zkncj\" (UniqueName: \"kubernetes.io/projected/f71d6351-0ed1-4047-bffa-cc1020e38ecf-kube-api-access-zkncj\") pod \"cinder-api-0\" (UID: \"f71d6351-0ed1-4047-bffa-cc1020e38ecf\") " pod="openstack/cinder-api-0"
Sep 30 12:38:20 crc kubenswrapper[5002]: I0930 12:38:20.214559 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f71d6351-0ed1-4047-bffa-cc1020e38ecf-config-data-custom\") pod \"cinder-api-0\" (UID: \"f71d6351-0ed1-4047-bffa-cc1020e38ecf\") " pod="openstack/cinder-api-0"
Sep 30 12:38:20 crc kubenswrapper[5002]: I0930 12:38:20.214942 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/f71d6351-0ed1-4047-bffa-cc1020e38ecf-public-tls-certs\") pod \"cinder-api-0\" (UID: \"f71d6351-0ed1-4047-bffa-cc1020e38ecf\") " pod="openstack/cinder-api-0"
Sep 30 12:38:20 crc kubenswrapper[5002]: I0930 12:38:20.319806 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0"
Sep 30 12:38:20 crc kubenswrapper[5002]: I0930 12:38:20.697702 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="64484577-e70a-44bd-aefc-27864dccdc4c" path="/var/lib/kubelet/pods/64484577-e70a-44bd-aefc-27864dccdc4c/volumes"
Sep 30 12:38:20 crc kubenswrapper[5002]: I0930 12:38:20.790539 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"]
Sep 30 12:38:20 crc kubenswrapper[5002]: W0930 12:38:20.793522 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf71d6351_0ed1_4047_bffa_cc1020e38ecf.slice/crio-7d17ff0759ca2c91be821934057ac3fe3fc38ad555c4aa7ebbff8e28a8a38eac WatchSource:0}: Error finding container 7d17ff0759ca2c91be821934057ac3fe3fc38ad555c4aa7ebbff8e28a8a38eac: Status 404 returned error can't find the container with id 7d17ff0759ca2c91be821934057ac3fe3fc38ad555c4aa7ebbff8e28a8a38eac
Sep 30 12:38:20 crc kubenswrapper[5002]: I0930 12:38:20.857147 5002 generic.go:334] "Generic (PLEG): container finished" podID="5cb9bff6-f879-4cb0-87ca-9a1879f8c516" containerID="3df199381973c968502aa99e0c1d592eb10b7346112a85f99ec1f4bd5c28c8c7" exitCode=143
Sep 30 12:38:20 crc kubenswrapper[5002]: I0930 12:38:20.857230 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"5cb9bff6-f879-4cb0-87ca-9a1879f8c516","Type":"ContainerDied","Data":"3df199381973c968502aa99e0c1d592eb10b7346112a85f99ec1f4bd5c28c8c7"}
Sep 30 12:38:20 crc kubenswrapper[5002]: I0930 12:38:20.860714 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a446bd7c-bf30-4027-b9d9-9c7087dff156","Type":"ContainerStarted","Data":"0760716e035976c3070797a34595f5cfedda6594d61778a653840fc57fa39fa8"}
Sep 30 12:38:20 crc kubenswrapper[5002]: I0930 12:38:20.860860 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0"
Sep 30 12:38:20 crc kubenswrapper[5002]: I0930 12:38:20.863119 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"f71d6351-0ed1-4047-bffa-cc1020e38ecf","Type":"ContainerStarted","Data":"7d17ff0759ca2c91be821934057ac3fe3fc38ad555c4aa7ebbff8e28a8a38eac"}
Sep 30 12:38:20 crc kubenswrapper[5002]: I0930 12:38:20.887881 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=1.935192915 podStartE2EDuration="6.887857985s" podCreationTimestamp="2025-09-30 12:38:14 +0000 UTC" firstStartedPulling="2025-09-30 12:38:15.644271805 +0000 UTC m=+1069.893953951" lastFinishedPulling="2025-09-30 12:38:20.596936875 +0000 UTC m=+1074.846619021" observedRunningTime="2025-09-30 12:38:20.878389944 +0000 UTC m=+1075.128072090" watchObservedRunningTime="2025-09-30 12:38:20.887857985 +0000 UTC m=+1075.137540131"
Sep 30 12:38:21 crc kubenswrapper[5002]: I0930 12:38:21.237116 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"]
Sep 30 12:38:21 crc kubenswrapper[5002]: I0930 12:38:21.237341 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="a39b35d6-60c5-4ceb-b46e-9a00daf421ce" containerName="glance-log" containerID="cri-o://cb042ef982b9bba57298308ede4cfed4f64370303f1c8aff2feb543e7f657ff2" gracePeriod=30
Sep 30 12:38:21 crc kubenswrapper[5002]: I0930 12:38:21.237508 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="a39b35d6-60c5-4ceb-b46e-9a00daf421ce" containerName="glance-httpd" containerID="cri-o://4a6d4dfe07a149acaf5c29368d8b2ab40a63c24a6f678e21205f44c0e8edd66e" gracePeriod=30
Sep 30 12:38:21 crc kubenswrapper[5002]: I0930 12:38:21.865904 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-c69f-account-create-hcqbp"]
Sep 30 12:38:21 crc kubenswrapper[5002]: I0930 12:38:21.867909 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-c69f-account-create-hcqbp"
Sep 30 12:38:21 crc kubenswrapper[5002]: I0930 12:38:21.872298 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-db-secret"
Sep 30 12:38:21 crc kubenswrapper[5002]: I0930 12:38:21.874778 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-c69f-account-create-hcqbp"]
Sep 30 12:38:21 crc kubenswrapper[5002]: I0930 12:38:21.890888 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"f71d6351-0ed1-4047-bffa-cc1020e38ecf","Type":"ContainerStarted","Data":"b55cdfdde8bbcde8c44582b435af032ab40bd37a727c30a2196a6f22862ab822"}
Sep 30 12:38:21 crc kubenswrapper[5002]: I0930 12:38:21.903992 5002 generic.go:334] "Generic (PLEG): container finished" podID="a39b35d6-60c5-4ceb-b46e-9a00daf421ce" containerID="cb042ef982b9bba57298308ede4cfed4f64370303f1c8aff2feb543e7f657ff2" exitCode=143
Sep 30 12:38:21 crc kubenswrapper[5002]: I0930 12:38:21.904891 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"a39b35d6-60c5-4ceb-b46e-9a00daf421ce","Type":"ContainerDied","Data":"cb042ef982b9bba57298308ede4cfed4f64370303f1c8aff2feb543e7f657ff2"}
Sep 30 12:38:21 crc kubenswrapper[5002]: I0930 12:38:21.926360 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-trh8x\" (UniqueName: \"kubernetes.io/projected/fa09a798-5d72-49bb-adb7-ea553a2f6e74-kube-api-access-trh8x\") pod \"nova-api-c69f-account-create-hcqbp\" (UID: \"fa09a798-5d72-49bb-adb7-ea553a2f6e74\") " pod="openstack/nova-api-c69f-account-create-hcqbp"
Sep 30 12:38:22 crc kubenswrapper[5002]: I0930 12:38:22.028149 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-trh8x\" (UniqueName: \"kubernetes.io/projected/fa09a798-5d72-49bb-adb7-ea553a2f6e74-kube-api-access-trh8x\") pod \"nova-api-c69f-account-create-hcqbp\" (UID: \"fa09a798-5d72-49bb-adb7-ea553a2f6e74\") " pod="openstack/nova-api-c69f-account-create-hcqbp"
Sep 30 12:38:22 crc kubenswrapper[5002]: I0930 12:38:22.055718 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-trh8x\" (UniqueName: \"kubernetes.io/projected/fa09a798-5d72-49bb-adb7-ea553a2f6e74-kube-api-access-trh8x\") pod \"nova-api-c69f-account-create-hcqbp\" (UID: \"fa09a798-5d72-49bb-adb7-ea553a2f6e74\") " pod="openstack/nova-api-c69f-account-create-hcqbp"
Sep 30 12:38:22 crc kubenswrapper[5002]: I0930 12:38:22.079289 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-f016-account-create-g57kg"]
Sep 30 12:38:22 crc kubenswrapper[5002]: I0930 12:38:22.080650 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-f016-account-create-g57kg"
Sep 30 12:38:22 crc kubenswrapper[5002]: I0930 12:38:22.083919 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-db-secret"
Sep 30 12:38:22 crc kubenswrapper[5002]: I0930 12:38:22.088099 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-f016-account-create-g57kg"]
Sep 30 12:38:22 crc kubenswrapper[5002]: I0930 12:38:22.191712 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-c69f-account-create-hcqbp"
Sep 30 12:38:22 crc kubenswrapper[5002]: I0930 12:38:22.230752 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"]
Sep 30 12:38:22 crc kubenswrapper[5002]: I0930 12:38:22.238715 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-95c6l\" (UniqueName: \"kubernetes.io/projected/a9278220-9445-4f33-a4e4-a2224206b58e-kube-api-access-95c6l\") pod \"nova-cell0-f016-account-create-g57kg\" (UID: \"a9278220-9445-4f33-a4e4-a2224206b58e\") " pod="openstack/nova-cell0-f016-account-create-g57kg"
Sep 30 12:38:22 crc kubenswrapper[5002]: I0930 12:38:22.272159 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-89b0-account-create-qsmbt"]
Sep 30 12:38:22 crc kubenswrapper[5002]: I0930 12:38:22.273366 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-89b0-account-create-qsmbt"
Sep 30 12:38:22 crc kubenswrapper[5002]: I0930 12:38:22.275082 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-db-secret"
Sep 30 12:38:22 crc kubenswrapper[5002]: I0930 12:38:22.284621 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-89b0-account-create-qsmbt"]
Sep 30 12:38:22 crc kubenswrapper[5002]: I0930 12:38:22.345790 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sd6hx\" (UniqueName: \"kubernetes.io/projected/3fcc312f-04fb-44a6-bfba-f083655d42bb-kube-api-access-sd6hx\") pod \"nova-cell1-89b0-account-create-qsmbt\" (UID: \"3fcc312f-04fb-44a6-bfba-f083655d42bb\") " pod="openstack/nova-cell1-89b0-account-create-qsmbt"
Sep 30 12:38:22 crc kubenswrapper[5002]: I0930 12:38:22.345927 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-95c6l\" (UniqueName: \"kubernetes.io/projected/a9278220-9445-4f33-a4e4-a2224206b58e-kube-api-access-95c6l\") pod \"nova-cell0-f016-account-create-g57kg\" (UID: \"a9278220-9445-4f33-a4e4-a2224206b58e\") " pod="openstack/nova-cell0-f016-account-create-g57kg"
Sep 30 12:38:22 crc kubenswrapper[5002]: I0930 12:38:22.370254 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-95c6l\" (UniqueName: \"kubernetes.io/projected/a9278220-9445-4f33-a4e4-a2224206b58e-kube-api-access-95c6l\") pod \"nova-cell0-f016-account-create-g57kg\" (UID: \"a9278220-9445-4f33-a4e4-a2224206b58e\") " pod="openstack/nova-cell0-f016-account-create-g57kg"
Sep 30 12:38:22 crc kubenswrapper[5002]: I0930 12:38:22.447798 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sd6hx\" (UniqueName: \"kubernetes.io/projected/3fcc312f-04fb-44a6-bfba-f083655d42bb-kube-api-access-sd6hx\") pod \"nova-cell1-89b0-account-create-qsmbt\" (UID: \"3fcc312f-04fb-44a6-bfba-f083655d42bb\") " pod="openstack/nova-cell1-89b0-account-create-qsmbt"
Sep 30 12:38:22 crc kubenswrapper[5002]: I0930 12:38:22.466926 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-f016-account-create-g57kg"
Sep 30 12:38:22 crc kubenswrapper[5002]: I0930 12:38:22.471105 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sd6hx\" (UniqueName: \"kubernetes.io/projected/3fcc312f-04fb-44a6-bfba-f083655d42bb-kube-api-access-sd6hx\") pod \"nova-cell1-89b0-account-create-qsmbt\" (UID: \"3fcc312f-04fb-44a6-bfba-f083655d42bb\") " pod="openstack/nova-cell1-89b0-account-create-qsmbt"
Sep 30 12:38:22 crc kubenswrapper[5002]: I0930 12:38:22.593966 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-784489f99d-nqwcr"
Sep 30 12:38:22 crc kubenswrapper[5002]: I0930 12:38:22.666836 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-89b0-account-create-qsmbt"
Sep 30 12:38:22 crc kubenswrapper[5002]: I0930 12:38:22.761608 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-c69f-account-create-hcqbp"]
Sep 30 12:38:22 crc kubenswrapper[5002]: I0930 12:38:22.934955 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"f71d6351-0ed1-4047-bffa-cc1020e38ecf","Type":"ContainerStarted","Data":"a1ecca79b5004ef90d7f3a7741826895186bf9416f22b146965b54cda058160f"}
Sep 30 12:38:22 crc kubenswrapper[5002]: I0930 12:38:22.936423 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0"
Sep 30 12:38:22 crc kubenswrapper[5002]: I0930 12:38:22.937888 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="a446bd7c-bf30-4027-b9d9-9c7087dff156" containerName="ceilometer-central-agent" containerID="cri-o://8f5325aab6e4fa4343a483237b57a41a4f3025769f3bf19b8f0b56156e4b5d9c" gracePeriod=30
Sep 30 12:38:22 crc kubenswrapper[5002]: I0930 12:38:22.938317 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-c69f-account-create-hcqbp" event={"ID":"fa09a798-5d72-49bb-adb7-ea553a2f6e74","Type":"ContainerStarted","Data":"248c8f47b28b1ee132e015d577f778fb6b9291c5b55b052f7620c85c0d9d1c57"}
Sep 30 12:38:22 crc kubenswrapper[5002]: I0930 12:38:22.938446 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="a446bd7c-bf30-4027-b9d9-9c7087dff156" containerName="proxy-httpd" containerID="cri-o://0760716e035976c3070797a34595f5cfedda6594d61778a653840fc57fa39fa8" gracePeriod=30
Sep 30 12:38:22 crc kubenswrapper[5002]: I0930 12:38:22.938625 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="a446bd7c-bf30-4027-b9d9-9c7087dff156" containerName="sg-core" containerID="cri-o://61f2ba70d12da07efab04d7b5795905b7b1e32bbd84eccb5ae1c512962be419b" gracePeriod=30
Sep 30 12:38:22 crc kubenswrapper[5002]: I0930 12:38:22.938718 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="a446bd7c-bf30-4027-b9d9-9c7087dff156" containerName="ceilometer-notification-agent" containerID="cri-o://34e4053fbd3e8b9d086d98db68f823df332fd0c31283a755880f78d523756a4c" gracePeriod=30
Sep 30 12:38:22 crc kubenswrapper[5002]: I0930 12:38:22.972243 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=3.972197026 podStartE2EDuration="3.972197026s" podCreationTimestamp="2025-09-30 12:38:19 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 12:38:22.971250931 +0000 UTC m=+1077.220933087" watchObservedRunningTime="2025-09-30 12:38:22.972197026 +0000 UTC m=+1077.221879192"
Sep 30 12:38:23 crc kubenswrapper[5002]: I0930 12:38:23.087697 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-f016-account-create-g57kg"]
Sep 30 12:38:23 crc kubenswrapper[5002]: I0930 12:38:23.348979 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-89b0-account-create-qsmbt"]
Sep 30 12:38:23 crc kubenswrapper[5002]: I0930 12:38:23.816933 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0"
Sep 30 12:38:23 crc kubenswrapper[5002]: I0930 12:38:23.896777 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5cb9bff6-f879-4cb0-87ca-9a1879f8c516-logs\") pod \"5cb9bff6-f879-4cb0-87ca-9a1879f8c516\" (UID: \"5cb9bff6-f879-4cb0-87ca-9a1879f8c516\") "
Sep 30 12:38:23 crc kubenswrapper[5002]: I0930 12:38:23.896835 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5cb9bff6-f879-4cb0-87ca-9a1879f8c516-config-data\") pod \"5cb9bff6-f879-4cb0-87ca-9a1879f8c516\" (UID: \"5cb9bff6-f879-4cb0-87ca-9a1879f8c516\") "
Sep 30 12:38:23 crc kubenswrapper[5002]: I0930 12:38:23.896907 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5cb9bff6-f879-4cb0-87ca-9a1879f8c516-scripts\") pod \"5cb9bff6-f879-4cb0-87ca-9a1879f8c516\" (UID: \"5cb9bff6-f879-4cb0-87ca-9a1879f8c516\") "
Sep 30 12:38:23 crc kubenswrapper[5002]: I0930 12:38:23.896964 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5cb9bff6-f879-4cb0-87ca-9a1879f8c516-combined-ca-bundle\") pod \"5cb9bff6-f879-4cb0-87ca-9a1879f8c516\" (UID: \"5cb9bff6-f879-4cb0-87ca-9a1879f8c516\") "
Sep 30 12:38:23 crc kubenswrapper[5002]: I0930 12:38:23.896984 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/5cb9bff6-f879-4cb0-87ca-9a1879f8c516-httpd-run\") pod \"5cb9bff6-f879-4cb0-87ca-9a1879f8c516\" (UID: \"5cb9bff6-f879-4cb0-87ca-9a1879f8c516\") "
Sep 30 12:38:23 crc kubenswrapper[5002]: I0930 12:38:23.896997 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/5cb9bff6-f879-4cb0-87ca-9a1879f8c516-public-tls-certs\") pod \"5cb9bff6-f879-4cb0-87ca-9a1879f8c516\" (UID: \"5cb9bff6-f879-4cb0-87ca-9a1879f8c516\") "
Sep 30 12:38:23 crc kubenswrapper[5002]: I0930 12:38:23.897036 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"5cb9bff6-f879-4cb0-87ca-9a1879f8c516\" (UID: \"5cb9bff6-f879-4cb0-87ca-9a1879f8c516\") "
Sep 30 12:38:23 crc kubenswrapper[5002]: I0930 12:38:23.897082 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tq4xz\" (UniqueName: \"kubernetes.io/projected/5cb9bff6-f879-4cb0-87ca-9a1879f8c516-kube-api-access-tq4xz\") pod \"5cb9bff6-f879-4cb0-87ca-9a1879f8c516\" (UID: \"5cb9bff6-f879-4cb0-87ca-9a1879f8c516\") "
Sep 30 12:38:23 crc kubenswrapper[5002]: I0930 12:38:23.897502 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5cb9bff6-f879-4cb0-87ca-9a1879f8c516-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "5cb9bff6-f879-4cb0-87ca-9a1879f8c516" (UID: "5cb9bff6-f879-4cb0-87ca-9a1879f8c516"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 30 12:38:23 crc kubenswrapper[5002]: I0930 12:38:23.897611 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5cb9bff6-f879-4cb0-87ca-9a1879f8c516-logs" (OuterVolumeSpecName: "logs") pod "5cb9bff6-f879-4cb0-87ca-9a1879f8c516" (UID: "5cb9bff6-f879-4cb0-87ca-9a1879f8c516"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 30 12:38:23 crc kubenswrapper[5002]: I0930 12:38:23.905852 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5cb9bff6-f879-4cb0-87ca-9a1879f8c516-kube-api-access-tq4xz" (OuterVolumeSpecName: "kube-api-access-tq4xz") pod "5cb9bff6-f879-4cb0-87ca-9a1879f8c516" (UID: "5cb9bff6-f879-4cb0-87ca-9a1879f8c516"). InnerVolumeSpecName "kube-api-access-tq4xz". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 12:38:23 crc kubenswrapper[5002]: I0930 12:38:23.906404 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5cb9bff6-f879-4cb0-87ca-9a1879f8c516-scripts" (OuterVolumeSpecName: "scripts") pod "5cb9bff6-f879-4cb0-87ca-9a1879f8c516" (UID: "5cb9bff6-f879-4cb0-87ca-9a1879f8c516"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 12:38:23 crc kubenswrapper[5002]: I0930 12:38:23.907518 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage07-crc" (OuterVolumeSpecName: "glance") pod "5cb9bff6-f879-4cb0-87ca-9a1879f8c516" (UID: "5cb9bff6-f879-4cb0-87ca-9a1879f8c516"). InnerVolumeSpecName "local-storage07-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue ""
Sep 30 12:38:23 crc kubenswrapper[5002]: I0930 12:38:23.938679 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5cb9bff6-f879-4cb0-87ca-9a1879f8c516-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "5cb9bff6-f879-4cb0-87ca-9a1879f8c516" (UID: "5cb9bff6-f879-4cb0-87ca-9a1879f8c516"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 12:38:23 crc kubenswrapper[5002]: I0930 12:38:23.957325 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-f016-account-create-g57kg" event={"ID":"a9278220-9445-4f33-a4e4-a2224206b58e","Type":"ContainerStarted","Data":"cb0b5286d1ef4e1be6bf180bf3c6c0e16cae232fa043f16393e9eba6d67b31a7"}
Sep 30 12:38:23 crc kubenswrapper[5002]: I0930 12:38:23.957380 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-f016-account-create-g57kg" event={"ID":"a9278220-9445-4f33-a4e4-a2224206b58e","Type":"ContainerStarted","Data":"32b93be5a56996fbcddb3a916204b5b1abdaa45f73b627de79f2ca7c0c164e32"}
Sep 30 12:38:23 crc kubenswrapper[5002]: I0930 12:38:23.967840 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5cb9bff6-f879-4cb0-87ca-9a1879f8c516-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "5cb9bff6-f879-4cb0-87ca-9a1879f8c516" (UID: "5cb9bff6-f879-4cb0-87ca-9a1879f8c516"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 12:38:23 crc kubenswrapper[5002]: I0930 12:38:23.971877 5002 generic.go:334] "Generic (PLEG): container finished" podID="fa09a798-5d72-49bb-adb7-ea553a2f6e74" containerID="91bca0cfabf89f30330744570d3cbc0de9ddb00b0af437af6658490f5a99fdff" exitCode=0
Sep 30 12:38:23 crc kubenswrapper[5002]: I0930 12:38:23.971953 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-c69f-account-create-hcqbp" event={"ID":"fa09a798-5d72-49bb-adb7-ea553a2f6e74","Type":"ContainerDied","Data":"91bca0cfabf89f30330744570d3cbc0de9ddb00b0af437af6658490f5a99fdff"}
Sep 30 12:38:23 crc kubenswrapper[5002]: I0930 12:38:23.979601 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5cb9bff6-f879-4cb0-87ca-9a1879f8c516-config-data" (OuterVolumeSpecName: "config-data") pod "5cb9bff6-f879-4cb0-87ca-9a1879f8c516" (UID: "5cb9bff6-f879-4cb0-87ca-9a1879f8c516"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 12:38:23 crc kubenswrapper[5002]: I0930 12:38:23.981267 5002 generic.go:334] "Generic (PLEG): container finished" podID="5cb9bff6-f879-4cb0-87ca-9a1879f8c516" containerID="c35f92f62bacb2046bb207b031c37b392d4486f818a61d44e1501770052babf0" exitCode=0
Sep 30 12:38:23 crc kubenswrapper[5002]: I0930 12:38:23.981337 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"5cb9bff6-f879-4cb0-87ca-9a1879f8c516","Type":"ContainerDied","Data":"c35f92f62bacb2046bb207b031c37b392d4486f818a61d44e1501770052babf0"}
Sep 30 12:38:23 crc kubenswrapper[5002]: I0930 12:38:23.981365 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"5cb9bff6-f879-4cb0-87ca-9a1879f8c516","Type":"ContainerDied","Data":"9dda3d81cbd7a6e68714d7b41f2b0bea45c77f2ae2007cf420c108537c62e903"}
Sep 30 12:38:23 crc kubenswrapper[5002]: I0930 12:38:23.981376 5002 util.go:48] "No ready sandbox for pod can be found.
Need to start a new one" pod="openstack/glance-default-external-api-0" Sep 30 12:38:23 crc kubenswrapper[5002]: I0930 12:38:23.981384 5002 scope.go:117] "RemoveContainer" containerID="c35f92f62bacb2046bb207b031c37b392d4486f818a61d44e1501770052babf0" Sep 30 12:38:23 crc kubenswrapper[5002]: I0930 12:38:23.986737 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-89b0-account-create-qsmbt" event={"ID":"3fcc312f-04fb-44a6-bfba-f083655d42bb","Type":"ContainerStarted","Data":"e568b901f2f004554c2b51e6ca6d7bfb87cd86d554606ee1c5d0b24090253d1d"} Sep 30 12:38:23 crc kubenswrapper[5002]: I0930 12:38:23.986788 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-89b0-account-create-qsmbt" event={"ID":"3fcc312f-04fb-44a6-bfba-f083655d42bb","Type":"ContainerStarted","Data":"2d9bcfa4127ff13b939d8aaff552511dbe4fc54f4a7ad775979e3443af38b727"} Sep 30 12:38:23 crc kubenswrapper[5002]: I0930 12:38:23.988018 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-f016-account-create-g57kg" podStartSLOduration=1.9880027390000001 podStartE2EDuration="1.988002739s" podCreationTimestamp="2025-09-30 12:38:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 12:38:23.983264249 +0000 UTC m=+1078.232946415" watchObservedRunningTime="2025-09-30 12:38:23.988002739 +0000 UTC m=+1078.237684885" Sep 30 12:38:23 crc kubenswrapper[5002]: I0930 12:38:23.994760 5002 generic.go:334] "Generic (PLEG): container finished" podID="a446bd7c-bf30-4027-b9d9-9c7087dff156" containerID="0760716e035976c3070797a34595f5cfedda6594d61778a653840fc57fa39fa8" exitCode=0 Sep 30 12:38:23 crc kubenswrapper[5002]: I0930 12:38:23.994798 5002 generic.go:334] "Generic (PLEG): container finished" podID="a446bd7c-bf30-4027-b9d9-9c7087dff156" containerID="61f2ba70d12da07efab04d7b5795905b7b1e32bbd84eccb5ae1c512962be419b" exitCode=2 Sep 30 12:38:23 crc kubenswrapper[5002]: I0930 12:38:23.994809 5002 generic.go:334] "Generic (PLEG): container finished" podID="a446bd7c-bf30-4027-b9d9-9c7087dff156" containerID="34e4053fbd3e8b9d086d98db68f823df332fd0c31283a755880f78d523756a4c" exitCode=0 Sep 30 12:38:23 crc kubenswrapper[5002]: I0930 12:38:23.995108 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a446bd7c-bf30-4027-b9d9-9c7087dff156","Type":"ContainerDied","Data":"0760716e035976c3070797a34595f5cfedda6594d61778a653840fc57fa39fa8"} Sep 30 12:38:23 crc kubenswrapper[5002]: I0930 12:38:23.995171 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a446bd7c-bf30-4027-b9d9-9c7087dff156","Type":"ContainerDied","Data":"61f2ba70d12da07efab04d7b5795905b7b1e32bbd84eccb5ae1c512962be419b"} Sep 30 12:38:23 crc kubenswrapper[5002]: I0930 12:38:23.995186 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a446bd7c-bf30-4027-b9d9-9c7087dff156","Type":"ContainerDied","Data":"34e4053fbd3e8b9d086d98db68f823df332fd0c31283a755880f78d523756a4c"} Sep 30 12:38:24 crc kubenswrapper[5002]: I0930 12:38:24.000942 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tq4xz\" (UniqueName: \"kubernetes.io/projected/5cb9bff6-f879-4cb0-87ca-9a1879f8c516-kube-api-access-tq4xz\") on node \"crc\" DevicePath \"\"" Sep 30 12:38:24 crc kubenswrapper[5002]: I0930 12:38:24.000970 5002 reconciler_common.go:293] "Volume detached for volume \"logs\" 
(UniqueName: \"kubernetes.io/empty-dir/5cb9bff6-f879-4cb0-87ca-9a1879f8c516-logs\") on node \"crc\" DevicePath \"\"" Sep 30 12:38:24 crc kubenswrapper[5002]: I0930 12:38:24.000979 5002 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5cb9bff6-f879-4cb0-87ca-9a1879f8c516-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 12:38:24 crc kubenswrapper[5002]: I0930 12:38:24.000987 5002 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5cb9bff6-f879-4cb0-87ca-9a1879f8c516-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 12:38:24 crc kubenswrapper[5002]: I0930 12:38:24.000996 5002 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5cb9bff6-f879-4cb0-87ca-9a1879f8c516-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 12:38:24 crc kubenswrapper[5002]: I0930 12:38:24.001003 5002 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/5cb9bff6-f879-4cb0-87ca-9a1879f8c516-httpd-run\") on node \"crc\" DevicePath \"\"" Sep 30 12:38:24 crc kubenswrapper[5002]: I0930 12:38:24.001011 5002 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/5cb9bff6-f879-4cb0-87ca-9a1879f8c516-public-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 30 12:38:24 crc kubenswrapper[5002]: I0930 12:38:24.001044 5002 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") on node \"crc\" " Sep 30 12:38:24 crc kubenswrapper[5002]: I0930 12:38:24.009850 5002 scope.go:117] "RemoveContainer" containerID="3df199381973c968502aa99e0c1d592eb10b7346112a85f99ec1f4bd5c28c8c7" Sep 30 12:38:24 crc kubenswrapper[5002]: I0930 12:38:24.016145 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-89b0-account-create-qsmbt" podStartSLOduration=2.016124725 podStartE2EDuration="2.016124725s" podCreationTimestamp="2025-09-30 12:38:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 12:38:24.015038267 +0000 UTC m=+1078.264720423" watchObservedRunningTime="2025-09-30 12:38:24.016124725 +0000 UTC m=+1078.265806871" Sep 30 12:38:24 crc kubenswrapper[5002]: I0930 12:38:24.023577 5002 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage07-crc" (UniqueName: "kubernetes.io/local-volume/local-storage07-crc") on node "crc" Sep 30 12:38:24 crc kubenswrapper[5002]: I0930 12:38:24.035483 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Sep 30 12:38:24 crc kubenswrapper[5002]: I0930 12:38:24.037706 5002 scope.go:117] "RemoveContainer" containerID="c35f92f62bacb2046bb207b031c37b392d4486f818a61d44e1501770052babf0" Sep 30 12:38:24 crc kubenswrapper[5002]: E0930 12:38:24.039153 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c35f92f62bacb2046bb207b031c37b392d4486f818a61d44e1501770052babf0\": container with ID starting with c35f92f62bacb2046bb207b031c37b392d4486f818a61d44e1501770052babf0 not found: ID does not exist" containerID="c35f92f62bacb2046bb207b031c37b392d4486f818a61d44e1501770052babf0" Sep 30 12:38:24 crc kubenswrapper[5002]: I0930 12:38:24.039222 5002 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c35f92f62bacb2046bb207b031c37b392d4486f818a61d44e1501770052babf0"} err="failed to get container status \"c35f92f62bacb2046bb207b031c37b392d4486f818a61d44e1501770052babf0\": rpc error: code = NotFound desc = could not find container \"c35f92f62bacb2046bb207b031c37b392d4486f818a61d44e1501770052babf0\": container with ID starting with c35f92f62bacb2046bb207b031c37b392d4486f818a61d44e1501770052babf0 not found: ID does not exist" Sep 30 12:38:24 crc kubenswrapper[5002]: I0930 12:38:24.039253 5002 scope.go:117] "RemoveContainer" containerID="3df199381973c968502aa99e0c1d592eb10b7346112a85f99ec1f4bd5c28c8c7" Sep 30 12:38:24 crc kubenswrapper[5002]: E0930 12:38:24.039688 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3df199381973c968502aa99e0c1d592eb10b7346112a85f99ec1f4bd5c28c8c7\": container with ID starting with 3df199381973c968502aa99e0c1d592eb10b7346112a85f99ec1f4bd5c28c8c7 not found: ID does not exist" containerID="3df199381973c968502aa99e0c1d592eb10b7346112a85f99ec1f4bd5c28c8c7" Sep 30 12:38:24 crc kubenswrapper[5002]: I0930 12:38:24.039723 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3df199381973c968502aa99e0c1d592eb10b7346112a85f99ec1f4bd5c28c8c7"} err="failed to get container status \"3df199381973c968502aa99e0c1d592eb10b7346112a85f99ec1f4bd5c28c8c7\": rpc error: code = NotFound desc = could not find container \"3df199381973c968502aa99e0c1d592eb10b7346112a85f99ec1f4bd5c28c8c7\": container with ID starting with 3df199381973c968502aa99e0c1d592eb10b7346112a85f99ec1f4bd5c28c8c7 not found: ID does not exist" Sep 30 12:38:24 crc kubenswrapper[5002]: I0930 12:38:24.053952 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Sep 30 12:38:24 crc kubenswrapper[5002]: I0930 12:38:24.060059 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Sep 30 12:38:24 crc kubenswrapper[5002]: E0930 12:38:24.060456 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5cb9bff6-f879-4cb0-87ca-9a1879f8c516" containerName="glance-log" Sep 30 12:38:24 crc kubenswrapper[5002]: I0930 12:38:24.060859 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="5cb9bff6-f879-4cb0-87ca-9a1879f8c516" containerName="glance-log" Sep 30 12:38:24 crc kubenswrapper[5002]: E0930 12:38:24.060889 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5cb9bff6-f879-4cb0-87ca-9a1879f8c516" containerName="glance-httpd" Sep 30 12:38:24 crc kubenswrapper[5002]: I0930 12:38:24.060896 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="5cb9bff6-f879-4cb0-87ca-9a1879f8c516" containerName="glance-httpd" Sep 30 12:38:24 crc kubenswrapper[5002]: I0930 12:38:24.061063 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="5cb9bff6-f879-4cb0-87ca-9a1879f8c516" containerName="glance-log" Sep 30 12:38:24 crc kubenswrapper[5002]: I0930 12:38:24.061088 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="5cb9bff6-f879-4cb0-87ca-9a1879f8c516" containerName="glance-httpd" Sep 30 12:38:24 crc kubenswrapper[5002]: I0930 12:38:24.062262 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Sep 30 12:38:24 crc kubenswrapper[5002]: I0930 12:38:24.063789 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Sep 30 12:38:24 crc kubenswrapper[5002]: I0930 12:38:24.064370 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc" Sep 30 12:38:24 crc kubenswrapper[5002]: I0930 12:38:24.094794 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Sep 30 12:38:24 crc kubenswrapper[5002]: I0930 12:38:24.104676 5002 reconciler_common.go:293] "Volume detached for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") on node \"crc\" DevicePath \"\"" Sep 30 12:38:24 crc kubenswrapper[5002]: I0930 12:38:24.206598 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b5a818b5-30cc-4c21-b7c4-7563b49832eb-scripts\") pod \"glance-default-external-api-0\" (UID: \"b5a818b5-30cc-4c21-b7c4-7563b49832eb\") " pod="openstack/glance-default-external-api-0" Sep 30 12:38:24 crc kubenswrapper[5002]: I0930 12:38:24.206832 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-external-api-0\" (UID: \"b5a818b5-30cc-4c21-b7c4-7563b49832eb\") " pod="openstack/glance-default-external-api-0" Sep 30 12:38:24 crc kubenswrapper[5002]: I0930 12:38:24.206871 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/b5a818b5-30cc-4c21-b7c4-7563b49832eb-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"b5a818b5-30cc-4c21-b7c4-7563b49832eb\") " pod="openstack/glance-default-external-api-0" Sep 30 12:38:24 crc kubenswrapper[5002]: I0930 12:38:24.206895 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b5a818b5-30cc-4c21-b7c4-7563b49832eb-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"b5a818b5-30cc-4c21-b7c4-7563b49832eb\") " pod="openstack/glance-default-external-api-0" Sep 30 12:38:24 crc kubenswrapper[5002]: I0930 12:38:24.206923 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b5a818b5-30cc-4c21-b7c4-7563b49832eb-config-data\") pod \"glance-default-external-api-0\" (UID: \"b5a818b5-30cc-4c21-b7c4-7563b49832eb\") " pod="openstack/glance-default-external-api-0" Sep 30 12:38:24 crc kubenswrapper[5002]: I0930 12:38:24.207046 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vqp4s\" (UniqueName: \"kubernetes.io/projected/b5a818b5-30cc-4c21-b7c4-7563b49832eb-kube-api-access-vqp4s\") pod \"glance-default-external-api-0\" (UID: \"b5a818b5-30cc-4c21-b7c4-7563b49832eb\") " pod="openstack/glance-default-external-api-0" Sep 30 12:38:24 crc kubenswrapper[5002]: I0930 12:38:24.207129 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b5a818b5-30cc-4c21-b7c4-7563b49832eb-combined-ca-bundle\") pod 
\"glance-default-external-api-0\" (UID: \"b5a818b5-30cc-4c21-b7c4-7563b49832eb\") " pod="openstack/glance-default-external-api-0" Sep 30 12:38:24 crc kubenswrapper[5002]: I0930 12:38:24.207371 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b5a818b5-30cc-4c21-b7c4-7563b49832eb-logs\") pod \"glance-default-external-api-0\" (UID: \"b5a818b5-30cc-4c21-b7c4-7563b49832eb\") " pod="openstack/glance-default-external-api-0" Sep 30 12:38:24 crc kubenswrapper[5002]: I0930 12:38:24.308975 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b5a818b5-30cc-4c21-b7c4-7563b49832eb-scripts\") pod \"glance-default-external-api-0\" (UID: \"b5a818b5-30cc-4c21-b7c4-7563b49832eb\") " pod="openstack/glance-default-external-api-0" Sep 30 12:38:24 crc kubenswrapper[5002]: I0930 12:38:24.309040 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-external-api-0\" (UID: \"b5a818b5-30cc-4c21-b7c4-7563b49832eb\") " pod="openstack/glance-default-external-api-0" Sep 30 12:38:24 crc kubenswrapper[5002]: I0930 12:38:24.309090 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/b5a818b5-30cc-4c21-b7c4-7563b49832eb-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"b5a818b5-30cc-4c21-b7c4-7563b49832eb\") " pod="openstack/glance-default-external-api-0" Sep 30 12:38:24 crc kubenswrapper[5002]: I0930 12:38:24.309121 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b5a818b5-30cc-4c21-b7c4-7563b49832eb-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"b5a818b5-30cc-4c21-b7c4-7563b49832eb\") " pod="openstack/glance-default-external-api-0" Sep 30 12:38:24 crc kubenswrapper[5002]: I0930 12:38:24.309155 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b5a818b5-30cc-4c21-b7c4-7563b49832eb-config-data\") pod \"glance-default-external-api-0\" (UID: \"b5a818b5-30cc-4c21-b7c4-7563b49832eb\") " pod="openstack/glance-default-external-api-0" Sep 30 12:38:24 crc kubenswrapper[5002]: I0930 12:38:24.309191 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vqp4s\" (UniqueName: \"kubernetes.io/projected/b5a818b5-30cc-4c21-b7c4-7563b49832eb-kube-api-access-vqp4s\") pod \"glance-default-external-api-0\" (UID: \"b5a818b5-30cc-4c21-b7c4-7563b49832eb\") " pod="openstack/glance-default-external-api-0" Sep 30 12:38:24 crc kubenswrapper[5002]: I0930 12:38:24.309227 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b5a818b5-30cc-4c21-b7c4-7563b49832eb-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"b5a818b5-30cc-4c21-b7c4-7563b49832eb\") " pod="openstack/glance-default-external-api-0" Sep 30 12:38:24 crc kubenswrapper[5002]: I0930 12:38:24.309295 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b5a818b5-30cc-4c21-b7c4-7563b49832eb-logs\") pod \"glance-default-external-api-0\" (UID: \"b5a818b5-30cc-4c21-b7c4-7563b49832eb\") " 
pod="openstack/glance-default-external-api-0" Sep 30 12:38:24 crc kubenswrapper[5002]: I0930 12:38:24.309814 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/b5a818b5-30cc-4c21-b7c4-7563b49832eb-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"b5a818b5-30cc-4c21-b7c4-7563b49832eb\") " pod="openstack/glance-default-external-api-0" Sep 30 12:38:24 crc kubenswrapper[5002]: I0930 12:38:24.309899 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b5a818b5-30cc-4c21-b7c4-7563b49832eb-logs\") pod \"glance-default-external-api-0\" (UID: \"b5a818b5-30cc-4c21-b7c4-7563b49832eb\") " pod="openstack/glance-default-external-api-0" Sep 30 12:38:24 crc kubenswrapper[5002]: I0930 12:38:24.310157 5002 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-external-api-0\" (UID: \"b5a818b5-30cc-4c21-b7c4-7563b49832eb\") device mount path \"/mnt/openstack/pv07\"" pod="openstack/glance-default-external-api-0" Sep 30 12:38:24 crc kubenswrapper[5002]: I0930 12:38:24.321272 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b5a818b5-30cc-4c21-b7c4-7563b49832eb-scripts\") pod \"glance-default-external-api-0\" (UID: \"b5a818b5-30cc-4c21-b7c4-7563b49832eb\") " pod="openstack/glance-default-external-api-0" Sep 30 12:38:24 crc kubenswrapper[5002]: I0930 12:38:24.321781 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b5a818b5-30cc-4c21-b7c4-7563b49832eb-config-data\") pod \"glance-default-external-api-0\" (UID: \"b5a818b5-30cc-4c21-b7c4-7563b49832eb\") " pod="openstack/glance-default-external-api-0" Sep 30 12:38:24 crc kubenswrapper[5002]: I0930 12:38:24.321939 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b5a818b5-30cc-4c21-b7c4-7563b49832eb-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"b5a818b5-30cc-4c21-b7c4-7563b49832eb\") " pod="openstack/glance-default-external-api-0" Sep 30 12:38:24 crc kubenswrapper[5002]: I0930 12:38:24.332599 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vqp4s\" (UniqueName: \"kubernetes.io/projected/b5a818b5-30cc-4c21-b7c4-7563b49832eb-kube-api-access-vqp4s\") pod \"glance-default-external-api-0\" (UID: \"b5a818b5-30cc-4c21-b7c4-7563b49832eb\") " pod="openstack/glance-default-external-api-0" Sep 30 12:38:24 crc kubenswrapper[5002]: I0930 12:38:24.341766 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b5a818b5-30cc-4c21-b7c4-7563b49832eb-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"b5a818b5-30cc-4c21-b7c4-7563b49832eb\") " pod="openstack/glance-default-external-api-0" Sep 30 12:38:24 crc kubenswrapper[5002]: I0930 12:38:24.367074 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-external-api-0\" (UID: \"b5a818b5-30cc-4c21-b7c4-7563b49832eb\") " pod="openstack/glance-default-external-api-0" Sep 30 12:38:24 crc kubenswrapper[5002]: I0930 12:38:24.449311 5002 util.go:30] "No sandbox for 
pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Sep 30 12:38:24 crc kubenswrapper[5002]: I0930 12:38:24.689593 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5cb9bff6-f879-4cb0-87ca-9a1879f8c516" path="/var/lib/kubelet/pods/5cb9bff6-f879-4cb0-87ca-9a1879f8c516/volumes" Sep 30 12:38:24 crc kubenswrapper[5002]: I0930 12:38:24.971232 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Sep 30 12:38:25 crc kubenswrapper[5002]: I0930 12:38:25.010338 5002 generic.go:334] "Generic (PLEG): container finished" podID="3fcc312f-04fb-44a6-bfba-f083655d42bb" containerID="e568b901f2f004554c2b51e6ca6d7bfb87cd86d554606ee1c5d0b24090253d1d" exitCode=0 Sep 30 12:38:25 crc kubenswrapper[5002]: I0930 12:38:25.010406 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-89b0-account-create-qsmbt" event={"ID":"3fcc312f-04fb-44a6-bfba-f083655d42bb","Type":"ContainerDied","Data":"e568b901f2f004554c2b51e6ca6d7bfb87cd86d554606ee1c5d0b24090253d1d"} Sep 30 12:38:25 crc kubenswrapper[5002]: I0930 12:38:25.021459 5002 generic.go:334] "Generic (PLEG): container finished" podID="a446bd7c-bf30-4027-b9d9-9c7087dff156" containerID="8f5325aab6e4fa4343a483237b57a41a4f3025769f3bf19b8f0b56156e4b5d9c" exitCode=0 Sep 30 12:38:25 crc kubenswrapper[5002]: I0930 12:38:25.021651 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a446bd7c-bf30-4027-b9d9-9c7087dff156","Type":"ContainerDied","Data":"8f5325aab6e4fa4343a483237b57a41a4f3025769f3bf19b8f0b56156e4b5d9c"} Sep 30 12:38:25 crc kubenswrapper[5002]: I0930 12:38:25.024280 5002 generic.go:334] "Generic (PLEG): container finished" podID="a39b35d6-60c5-4ceb-b46e-9a00daf421ce" containerID="4a6d4dfe07a149acaf5c29368d8b2ab40a63c24a6f678e21205f44c0e8edd66e" exitCode=0 Sep 30 12:38:25 crc kubenswrapper[5002]: I0930 12:38:25.024342 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"a39b35d6-60c5-4ceb-b46e-9a00daf421ce","Type":"ContainerDied","Data":"4a6d4dfe07a149acaf5c29368d8b2ab40a63c24a6f678e21205f44c0e8edd66e"} Sep 30 12:38:25 crc kubenswrapper[5002]: I0930 12:38:25.024371 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"a39b35d6-60c5-4ceb-b46e-9a00daf421ce","Type":"ContainerDied","Data":"dfd7e88822b8a9fdfaca82d5fb76e1c34b62c5d069c8fd685db27aec25cbf9a5"} Sep 30 12:38:25 crc kubenswrapper[5002]: I0930 12:38:25.024409 5002 scope.go:117] "RemoveContainer" containerID="4a6d4dfe07a149acaf5c29368d8b2ab40a63c24a6f678e21205f44c0e8edd66e" Sep 30 12:38:25 crc kubenswrapper[5002]: I0930 12:38:25.024571 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Sep 30 12:38:25 crc kubenswrapper[5002]: I0930 12:38:25.032694 5002 generic.go:334] "Generic (PLEG): container finished" podID="a9278220-9445-4f33-a4e4-a2224206b58e" containerID="cb0b5286d1ef4e1be6bf180bf3c6c0e16cae232fa043f16393e9eba6d67b31a7" exitCode=0 Sep 30 12:38:25 crc kubenswrapper[5002]: I0930 12:38:25.032933 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-f016-account-create-g57kg" event={"ID":"a9278220-9445-4f33-a4e4-a2224206b58e","Type":"ContainerDied","Data":"cb0b5286d1ef4e1be6bf180bf3c6c0e16cae232fa043f16393e9eba6d67b31a7"} Sep 30 12:38:25 crc kubenswrapper[5002]: I0930 12:38:25.068218 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Sep 30 12:38:25 crc kubenswrapper[5002]: W0930 12:38:25.076540 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb5a818b5_30cc_4c21_b7c4_7563b49832eb.slice/crio-1d692e8391aef15ab74749bf2d7248a8ac7d09bdc2b47477ca2bda011aa1bca2 WatchSource:0}: Error finding container 1d692e8391aef15ab74749bf2d7248a8ac7d09bdc2b47477ca2bda011aa1bca2: Status 404 returned error can't find the container with id 1d692e8391aef15ab74749bf2d7248a8ac7d09bdc2b47477ca2bda011aa1bca2 Sep 30 12:38:25 crc kubenswrapper[5002]: I0930 12:38:25.082878 5002 scope.go:117] "RemoveContainer" containerID="cb042ef982b9bba57298308ede4cfed4f64370303f1c8aff2feb543e7f657ff2" Sep 30 12:38:25 crc kubenswrapper[5002]: I0930 12:38:25.129197 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/a39b35d6-60c5-4ceb-b46e-9a00daf421ce-internal-tls-certs\") pod \"a39b35d6-60c5-4ceb-b46e-9a00daf421ce\" (UID: \"a39b35d6-60c5-4ceb-b46e-9a00daf421ce\") " Sep 30 12:38:25 crc kubenswrapper[5002]: I0930 12:38:25.129273 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a39b35d6-60c5-4ceb-b46e-9a00daf421ce-combined-ca-bundle\") pod \"a39b35d6-60c5-4ceb-b46e-9a00daf421ce\" (UID: \"a39b35d6-60c5-4ceb-b46e-9a00daf421ce\") " Sep 30 12:38:25 crc kubenswrapper[5002]: I0930 12:38:25.129367 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a39b35d6-60c5-4ceb-b46e-9a00daf421ce-scripts\") pod \"a39b35d6-60c5-4ceb-b46e-9a00daf421ce\" (UID: \"a39b35d6-60c5-4ceb-b46e-9a00daf421ce\") " Sep 30 12:38:25 crc kubenswrapper[5002]: I0930 12:38:25.129416 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"a39b35d6-60c5-4ceb-b46e-9a00daf421ce\" (UID: \"a39b35d6-60c5-4ceb-b46e-9a00daf421ce\") " Sep 30 12:38:25 crc kubenswrapper[5002]: I0930 12:38:25.129448 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k7c25\" (UniqueName: \"kubernetes.io/projected/a39b35d6-60c5-4ceb-b46e-9a00daf421ce-kube-api-access-k7c25\") pod \"a39b35d6-60c5-4ceb-b46e-9a00daf421ce\" (UID: \"a39b35d6-60c5-4ceb-b46e-9a00daf421ce\") " Sep 30 12:38:25 crc kubenswrapper[5002]: I0930 12:38:25.129549 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/a39b35d6-60c5-4ceb-b46e-9a00daf421ce-httpd-run\") pod 
\"a39b35d6-60c5-4ceb-b46e-9a00daf421ce\" (UID: \"a39b35d6-60c5-4ceb-b46e-9a00daf421ce\") " Sep 30 12:38:25 crc kubenswrapper[5002]: I0930 12:38:25.129578 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a39b35d6-60c5-4ceb-b46e-9a00daf421ce-logs\") pod \"a39b35d6-60c5-4ceb-b46e-9a00daf421ce\" (UID: \"a39b35d6-60c5-4ceb-b46e-9a00daf421ce\") " Sep 30 12:38:25 crc kubenswrapper[5002]: I0930 12:38:25.129635 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a39b35d6-60c5-4ceb-b46e-9a00daf421ce-config-data\") pod \"a39b35d6-60c5-4ceb-b46e-9a00daf421ce\" (UID: \"a39b35d6-60c5-4ceb-b46e-9a00daf421ce\") " Sep 30 12:38:25 crc kubenswrapper[5002]: I0930 12:38:25.132249 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a39b35d6-60c5-4ceb-b46e-9a00daf421ce-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "a39b35d6-60c5-4ceb-b46e-9a00daf421ce" (UID: "a39b35d6-60c5-4ceb-b46e-9a00daf421ce"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 12:38:25 crc kubenswrapper[5002]: I0930 12:38:25.132731 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a39b35d6-60c5-4ceb-b46e-9a00daf421ce-logs" (OuterVolumeSpecName: "logs") pod "a39b35d6-60c5-4ceb-b46e-9a00daf421ce" (UID: "a39b35d6-60c5-4ceb-b46e-9a00daf421ce"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 12:38:25 crc kubenswrapper[5002]: I0930 12:38:25.139898 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage06-crc" (OuterVolumeSpecName: "glance") pod "a39b35d6-60c5-4ceb-b46e-9a00daf421ce" (UID: "a39b35d6-60c5-4ceb-b46e-9a00daf421ce"). InnerVolumeSpecName "local-storage06-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Sep 30 12:38:25 crc kubenswrapper[5002]: I0930 12:38:25.143821 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a39b35d6-60c5-4ceb-b46e-9a00daf421ce-kube-api-access-k7c25" (OuterVolumeSpecName: "kube-api-access-k7c25") pod "a39b35d6-60c5-4ceb-b46e-9a00daf421ce" (UID: "a39b35d6-60c5-4ceb-b46e-9a00daf421ce"). InnerVolumeSpecName "kube-api-access-k7c25". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 12:38:25 crc kubenswrapper[5002]: I0930 12:38:25.157148 5002 scope.go:117] "RemoveContainer" containerID="4a6d4dfe07a149acaf5c29368d8b2ab40a63c24a6f678e21205f44c0e8edd66e" Sep 30 12:38:25 crc kubenswrapper[5002]: E0930 12:38:25.157736 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4a6d4dfe07a149acaf5c29368d8b2ab40a63c24a6f678e21205f44c0e8edd66e\": container with ID starting with 4a6d4dfe07a149acaf5c29368d8b2ab40a63c24a6f678e21205f44c0e8edd66e not found: ID does not exist" containerID="4a6d4dfe07a149acaf5c29368d8b2ab40a63c24a6f678e21205f44c0e8edd66e" Sep 30 12:38:25 crc kubenswrapper[5002]: I0930 12:38:25.157768 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4a6d4dfe07a149acaf5c29368d8b2ab40a63c24a6f678e21205f44c0e8edd66e"} err="failed to get container status \"4a6d4dfe07a149acaf5c29368d8b2ab40a63c24a6f678e21205f44c0e8edd66e\": rpc error: code = NotFound desc = could not find container \"4a6d4dfe07a149acaf5c29368d8b2ab40a63c24a6f678e21205f44c0e8edd66e\": container with ID starting with 4a6d4dfe07a149acaf5c29368d8b2ab40a63c24a6f678e21205f44c0e8edd66e not found: ID does not exist" Sep 30 12:38:25 crc kubenswrapper[5002]: I0930 12:38:25.157811 5002 scope.go:117] "RemoveContainer" containerID="cb042ef982b9bba57298308ede4cfed4f64370303f1c8aff2feb543e7f657ff2" Sep 30 12:38:25 crc kubenswrapper[5002]: E0930 12:38:25.160498 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cb042ef982b9bba57298308ede4cfed4f64370303f1c8aff2feb543e7f657ff2\": container with ID starting with cb042ef982b9bba57298308ede4cfed4f64370303f1c8aff2feb543e7f657ff2 not found: ID does not exist" containerID="cb042ef982b9bba57298308ede4cfed4f64370303f1c8aff2feb543e7f657ff2" Sep 30 12:38:25 crc kubenswrapper[5002]: I0930 12:38:25.160533 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cb042ef982b9bba57298308ede4cfed4f64370303f1c8aff2feb543e7f657ff2"} err="failed to get container status \"cb042ef982b9bba57298308ede4cfed4f64370303f1c8aff2feb543e7f657ff2\": rpc error: code = NotFound desc = could not find container \"cb042ef982b9bba57298308ede4cfed4f64370303f1c8aff2feb543e7f657ff2\": container with ID starting with cb042ef982b9bba57298308ede4cfed4f64370303f1c8aff2feb543e7f657ff2 not found: ID does not exist" Sep 30 12:38:25 crc kubenswrapper[5002]: I0930 12:38:25.160712 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a39b35d6-60c5-4ceb-b46e-9a00daf421ce-scripts" (OuterVolumeSpecName: "scripts") pod "a39b35d6-60c5-4ceb-b46e-9a00daf421ce" (UID: "a39b35d6-60c5-4ceb-b46e-9a00daf421ce"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:38:25 crc kubenswrapper[5002]: I0930 12:38:25.166494 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-5784cf869f-cq2vq" Sep 30 12:38:25 crc kubenswrapper[5002]: I0930 12:38:25.206708 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a39b35d6-60c5-4ceb-b46e-9a00daf421ce-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "a39b35d6-60c5-4ceb-b46e-9a00daf421ce" (UID: "a39b35d6-60c5-4ceb-b46e-9a00daf421ce"). InnerVolumeSpecName "internal-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:38:25 crc kubenswrapper[5002]: I0930 12:38:25.228221 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a39b35d6-60c5-4ceb-b46e-9a00daf421ce-config-data" (OuterVolumeSpecName: "config-data") pod "a39b35d6-60c5-4ceb-b46e-9a00daf421ce" (UID: "a39b35d6-60c5-4ceb-b46e-9a00daf421ce"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:38:25 crc kubenswrapper[5002]: I0930 12:38:25.234701 5002 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/a39b35d6-60c5-4ceb-b46e-9a00daf421ce-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 30 12:38:25 crc kubenswrapper[5002]: I0930 12:38:25.234721 5002 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a39b35d6-60c5-4ceb-b46e-9a00daf421ce-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 12:38:25 crc kubenswrapper[5002]: I0930 12:38:25.234742 5002 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") on node \"crc\" " Sep 30 12:38:25 crc kubenswrapper[5002]: I0930 12:38:25.234751 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k7c25\" (UniqueName: \"kubernetes.io/projected/a39b35d6-60c5-4ceb-b46e-9a00daf421ce-kube-api-access-k7c25\") on node \"crc\" DevicePath \"\"" Sep 30 12:38:25 crc kubenswrapper[5002]: I0930 12:38:25.234762 5002 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/a39b35d6-60c5-4ceb-b46e-9a00daf421ce-httpd-run\") on node \"crc\" DevicePath \"\"" Sep 30 12:38:25 crc kubenswrapper[5002]: I0930 12:38:25.234770 5002 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a39b35d6-60c5-4ceb-b46e-9a00daf421ce-logs\") on node \"crc\" DevicePath \"\"" Sep 30 12:38:25 crc kubenswrapper[5002]: I0930 12:38:25.234780 5002 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a39b35d6-60c5-4ceb-b46e-9a00daf421ce-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 12:38:25 crc kubenswrapper[5002]: I0930 12:38:25.250746 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-75c8ddd69c-zfwrs"] Sep 30 12:38:25 crc kubenswrapper[5002]: I0930 12:38:25.251042 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-75c8ddd69c-zfwrs" podUID="accc1f38-16e0-4b04-98d4-ebece5b81989" containerName="dnsmasq-dns" containerID="cri-o://06c4bf846bd93f27fcfc0e1370ee7441dddbd94ac2a871840b022bada23e98fa" gracePeriod=10 Sep 30 12:38:25 crc kubenswrapper[5002]: I0930 12:38:25.261938 5002 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage06-crc" (UniqueName: "kubernetes.io/local-volume/local-storage06-crc") on node "crc" Sep 30 12:38:25 crc kubenswrapper[5002]: I0930 12:38:25.308228 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a39b35d6-60c5-4ceb-b46e-9a00daf421ce-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a39b35d6-60c5-4ceb-b46e-9a00daf421ce" (UID: "a39b35d6-60c5-4ceb-b46e-9a00daf421ce"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:38:25 crc kubenswrapper[5002]: I0930 12:38:25.337001 5002 reconciler_common.go:293] "Volume detached for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") on node \"crc\" DevicePath \"\"" Sep 30 12:38:25 crc kubenswrapper[5002]: I0930 12:38:25.337026 5002 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a39b35d6-60c5-4ceb-b46e-9a00daf421ce-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 12:38:25 crc kubenswrapper[5002]: I0930 12:38:25.377730 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0" Sep 30 12:38:25 crc kubenswrapper[5002]: I0930 12:38:25.437200 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Sep 30 12:38:25 crc kubenswrapper[5002]: I0930 12:38:25.446552 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 30 12:38:25 crc kubenswrapper[5002]: I0930 12:38:25.462782 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 30 12:38:25 crc kubenswrapper[5002]: I0930 12:38:25.465031 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-c69f-account-create-hcqbp" Sep 30 12:38:25 crc kubenswrapper[5002]: I0930 12:38:25.476396 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 30 12:38:25 crc kubenswrapper[5002]: E0930 12:38:25.479204 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fa09a798-5d72-49bb-adb7-ea553a2f6e74" containerName="mariadb-account-create" Sep 30 12:38:25 crc kubenswrapper[5002]: I0930 12:38:25.479263 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="fa09a798-5d72-49bb-adb7-ea553a2f6e74" containerName="mariadb-account-create" Sep 30 12:38:25 crc kubenswrapper[5002]: E0930 12:38:25.479294 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a39b35d6-60c5-4ceb-b46e-9a00daf421ce" containerName="glance-log" Sep 30 12:38:25 crc kubenswrapper[5002]: I0930 12:38:25.479305 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="a39b35d6-60c5-4ceb-b46e-9a00daf421ce" containerName="glance-log" Sep 30 12:38:25 crc kubenswrapper[5002]: E0930 12:38:25.479377 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a39b35d6-60c5-4ceb-b46e-9a00daf421ce" containerName="glance-httpd" Sep 30 12:38:25 crc kubenswrapper[5002]: I0930 12:38:25.479388 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="a39b35d6-60c5-4ceb-b46e-9a00daf421ce" containerName="glance-httpd" Sep 30 12:38:25 crc kubenswrapper[5002]: I0930 12:38:25.479861 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="fa09a798-5d72-49bb-adb7-ea553a2f6e74" containerName="mariadb-account-create" Sep 30 12:38:25 crc kubenswrapper[5002]: I0930 12:38:25.479914 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="a39b35d6-60c5-4ceb-b46e-9a00daf421ce" containerName="glance-httpd" Sep 30 12:38:25 crc kubenswrapper[5002]: I0930 12:38:25.479933 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="a39b35d6-60c5-4ceb-b46e-9a00daf421ce" containerName="glance-log" Sep 30 12:38:25 crc kubenswrapper[5002]: I0930 12:38:25.482130 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 30 
12:38:25 crc kubenswrapper[5002]: I0930 12:38:25.482250 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Sep 30 12:38:25 crc kubenswrapper[5002]: I0930 12:38:25.488201 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc" Sep 30 12:38:25 crc kubenswrapper[5002]: I0930 12:38:25.488462 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Sep 30 12:38:25 crc kubenswrapper[5002]: I0930 12:38:25.540080 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-trh8x\" (UniqueName: \"kubernetes.io/projected/fa09a798-5d72-49bb-adb7-ea553a2f6e74-kube-api-access-trh8x\") pod \"fa09a798-5d72-49bb-adb7-ea553a2f6e74\" (UID: \"fa09a798-5d72-49bb-adb7-ea553a2f6e74\") " Sep 30 12:38:25 crc kubenswrapper[5002]: I0930 12:38:25.549647 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fa09a798-5d72-49bb-adb7-ea553a2f6e74-kube-api-access-trh8x" (OuterVolumeSpecName: "kube-api-access-trh8x") pod "fa09a798-5d72-49bb-adb7-ea553a2f6e74" (UID: "fa09a798-5d72-49bb-adb7-ea553a2f6e74"). InnerVolumeSpecName "kube-api-access-trh8x". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 12:38:25 crc kubenswrapper[5002]: I0930 12:38:25.642847 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/17156d80-5354-430c-a7f9-294bae55a11c-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"17156d80-5354-430c-a7f9-294bae55a11c\") " pod="openstack/glance-default-internal-api-0" Sep 30 12:38:25 crc kubenswrapper[5002]: I0930 12:38:25.642907 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/17156d80-5354-430c-a7f9-294bae55a11c-config-data\") pod \"glance-default-internal-api-0\" (UID: \"17156d80-5354-430c-a7f9-294bae55a11c\") " pod="openstack/glance-default-internal-api-0" Sep 30 12:38:25 crc kubenswrapper[5002]: I0930 12:38:25.642956 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/17156d80-5354-430c-a7f9-294bae55a11c-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"17156d80-5354-430c-a7f9-294bae55a11c\") " pod="openstack/glance-default-internal-api-0" Sep 30 12:38:25 crc kubenswrapper[5002]: I0930 12:38:25.643098 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/17156d80-5354-430c-a7f9-294bae55a11c-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"17156d80-5354-430c-a7f9-294bae55a11c\") " pod="openstack/glance-default-internal-api-0" Sep 30 12:38:25 crc kubenswrapper[5002]: I0930 12:38:25.643202 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hrjml\" (UniqueName: \"kubernetes.io/projected/17156d80-5354-430c-a7f9-294bae55a11c-kube-api-access-hrjml\") pod \"glance-default-internal-api-0\" (UID: \"17156d80-5354-430c-a7f9-294bae55a11c\") " pod="openstack/glance-default-internal-api-0" Sep 30 12:38:25 crc kubenswrapper[5002]: I0930 12:38:25.643271 5002 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/17156d80-5354-430c-a7f9-294bae55a11c-scripts\") pod \"glance-default-internal-api-0\" (UID: \"17156d80-5354-430c-a7f9-294bae55a11c\") " pod="openstack/glance-default-internal-api-0" Sep 30 12:38:25 crc kubenswrapper[5002]: I0930 12:38:25.643302 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"glance-default-internal-api-0\" (UID: \"17156d80-5354-430c-a7f9-294bae55a11c\") " pod="openstack/glance-default-internal-api-0" Sep 30 12:38:25 crc kubenswrapper[5002]: I0930 12:38:25.643348 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/17156d80-5354-430c-a7f9-294bae55a11c-logs\") pod \"glance-default-internal-api-0\" (UID: \"17156d80-5354-430c-a7f9-294bae55a11c\") " pod="openstack/glance-default-internal-api-0" Sep 30 12:38:25 crc kubenswrapper[5002]: I0930 12:38:25.643413 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-trh8x\" (UniqueName: \"kubernetes.io/projected/fa09a798-5d72-49bb-adb7-ea553a2f6e74-kube-api-access-trh8x\") on node \"crc\" DevicePath \"\"" Sep 30 12:38:25 crc kubenswrapper[5002]: I0930 12:38:25.744722 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/17156d80-5354-430c-a7f9-294bae55a11c-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"17156d80-5354-430c-a7f9-294bae55a11c\") " pod="openstack/glance-default-internal-api-0" Sep 30 12:38:25 crc kubenswrapper[5002]: I0930 12:38:25.745243 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hrjml\" (UniqueName: \"kubernetes.io/projected/17156d80-5354-430c-a7f9-294bae55a11c-kube-api-access-hrjml\") pod \"glance-default-internal-api-0\" (UID: \"17156d80-5354-430c-a7f9-294bae55a11c\") " pod="openstack/glance-default-internal-api-0" Sep 30 12:38:25 crc kubenswrapper[5002]: I0930 12:38:25.745527 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/17156d80-5354-430c-a7f9-294bae55a11c-scripts\") pod \"glance-default-internal-api-0\" (UID: \"17156d80-5354-430c-a7f9-294bae55a11c\") " pod="openstack/glance-default-internal-api-0" Sep 30 12:38:25 crc kubenswrapper[5002]: I0930 12:38:25.745592 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"glance-default-internal-api-0\" (UID: \"17156d80-5354-430c-a7f9-294bae55a11c\") " pod="openstack/glance-default-internal-api-0" Sep 30 12:38:25 crc kubenswrapper[5002]: I0930 12:38:25.745736 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/17156d80-5354-430c-a7f9-294bae55a11c-logs\") pod \"glance-default-internal-api-0\" (UID: \"17156d80-5354-430c-a7f9-294bae55a11c\") " pod="openstack/glance-default-internal-api-0" Sep 30 12:38:25 crc kubenswrapper[5002]: I0930 12:38:25.745859 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/17156d80-5354-430c-a7f9-294bae55a11c-httpd-run\") pod \"glance-default-internal-api-0\" 
(UID: \"17156d80-5354-430c-a7f9-294bae55a11c\") " pod="openstack/glance-default-internal-api-0" Sep 30 12:38:25 crc kubenswrapper[5002]: I0930 12:38:25.745913 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/17156d80-5354-430c-a7f9-294bae55a11c-config-data\") pod \"glance-default-internal-api-0\" (UID: \"17156d80-5354-430c-a7f9-294bae55a11c\") " pod="openstack/glance-default-internal-api-0" Sep 30 12:38:25 crc kubenswrapper[5002]: I0930 12:38:25.745977 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/17156d80-5354-430c-a7f9-294bae55a11c-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"17156d80-5354-430c-a7f9-294bae55a11c\") " pod="openstack/glance-default-internal-api-0" Sep 30 12:38:25 crc kubenswrapper[5002]: I0930 12:38:25.746931 5002 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"glance-default-internal-api-0\" (UID: \"17156d80-5354-430c-a7f9-294bae55a11c\") device mount path \"/mnt/openstack/pv06\"" pod="openstack/glance-default-internal-api-0" Sep 30 12:38:25 crc kubenswrapper[5002]: I0930 12:38:25.752138 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/17156d80-5354-430c-a7f9-294bae55a11c-logs\") pod \"glance-default-internal-api-0\" (UID: \"17156d80-5354-430c-a7f9-294bae55a11c\") " pod="openstack/glance-default-internal-api-0" Sep 30 12:38:25 crc kubenswrapper[5002]: I0930 12:38:25.752547 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/17156d80-5354-430c-a7f9-294bae55a11c-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"17156d80-5354-430c-a7f9-294bae55a11c\") " pod="openstack/glance-default-internal-api-0" Sep 30 12:38:25 crc kubenswrapper[5002]: I0930 12:38:25.753657 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/17156d80-5354-430c-a7f9-294bae55a11c-scripts\") pod \"glance-default-internal-api-0\" (UID: \"17156d80-5354-430c-a7f9-294bae55a11c\") " pod="openstack/glance-default-internal-api-0" Sep 30 12:38:25 crc kubenswrapper[5002]: I0930 12:38:25.755002 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/17156d80-5354-430c-a7f9-294bae55a11c-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"17156d80-5354-430c-a7f9-294bae55a11c\") " pod="openstack/glance-default-internal-api-0" Sep 30 12:38:25 crc kubenswrapper[5002]: I0930 12:38:25.758900 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/17156d80-5354-430c-a7f9-294bae55a11c-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"17156d80-5354-430c-a7f9-294bae55a11c\") " pod="openstack/glance-default-internal-api-0" Sep 30 12:38:25 crc kubenswrapper[5002]: I0930 12:38:25.760417 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/17156d80-5354-430c-a7f9-294bae55a11c-config-data\") pod \"glance-default-internal-api-0\" (UID: \"17156d80-5354-430c-a7f9-294bae55a11c\") " pod="openstack/glance-default-internal-api-0" Sep 30 12:38:25 crc 
kubenswrapper[5002]: I0930 12:38:25.776703 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hrjml\" (UniqueName: \"kubernetes.io/projected/17156d80-5354-430c-a7f9-294bae55a11c-kube-api-access-hrjml\") pod \"glance-default-internal-api-0\" (UID: \"17156d80-5354-430c-a7f9-294bae55a11c\") " pod="openstack/glance-default-internal-api-0" Sep 30 12:38:25 crc kubenswrapper[5002]: I0930 12:38:25.788950 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"glance-default-internal-api-0\" (UID: \"17156d80-5354-430c-a7f9-294bae55a11c\") " pod="openstack/glance-default-internal-api-0" Sep 30 12:38:25 crc kubenswrapper[5002]: I0930 12:38:25.798829 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 30 12:38:25 crc kubenswrapper[5002]: I0930 12:38:25.814035 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Sep 30 12:38:25 crc kubenswrapper[5002]: I0930 12:38:25.941724 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-75c8ddd69c-zfwrs" Sep 30 12:38:25 crc kubenswrapper[5002]: I0930 12:38:25.951354 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qvdkq\" (UniqueName: \"kubernetes.io/projected/a446bd7c-bf30-4027-b9d9-9c7087dff156-kube-api-access-qvdkq\") pod \"a446bd7c-bf30-4027-b9d9-9c7087dff156\" (UID: \"a446bd7c-bf30-4027-b9d9-9c7087dff156\") " Sep 30 12:38:25 crc kubenswrapper[5002]: I0930 12:38:25.951409 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a446bd7c-bf30-4027-b9d9-9c7087dff156-config-data\") pod \"a446bd7c-bf30-4027-b9d9-9c7087dff156\" (UID: \"a446bd7c-bf30-4027-b9d9-9c7087dff156\") " Sep 30 12:38:25 crc kubenswrapper[5002]: I0930 12:38:25.951438 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a446bd7c-bf30-4027-b9d9-9c7087dff156-run-httpd\") pod \"a446bd7c-bf30-4027-b9d9-9c7087dff156\" (UID: \"a446bd7c-bf30-4027-b9d9-9c7087dff156\") " Sep 30 12:38:25 crc kubenswrapper[5002]: I0930 12:38:25.951492 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a446bd7c-bf30-4027-b9d9-9c7087dff156-combined-ca-bundle\") pod \"a446bd7c-bf30-4027-b9d9-9c7087dff156\" (UID: \"a446bd7c-bf30-4027-b9d9-9c7087dff156\") " Sep 30 12:38:25 crc kubenswrapper[5002]: I0930 12:38:25.951532 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a446bd7c-bf30-4027-b9d9-9c7087dff156-sg-core-conf-yaml\") pod \"a446bd7c-bf30-4027-b9d9-9c7087dff156\" (UID: \"a446bd7c-bf30-4027-b9d9-9c7087dff156\") " Sep 30 12:38:25 crc kubenswrapper[5002]: I0930 12:38:25.951592 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a446bd7c-bf30-4027-b9d9-9c7087dff156-scripts\") pod \"a446bd7c-bf30-4027-b9d9-9c7087dff156\" (UID: \"a446bd7c-bf30-4027-b9d9-9c7087dff156\") " Sep 30 12:38:25 crc kubenswrapper[5002]: I0930 12:38:25.951680 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" 
(UniqueName: \"kubernetes.io/empty-dir/a446bd7c-bf30-4027-b9d9-9c7087dff156-log-httpd\") pod \"a446bd7c-bf30-4027-b9d9-9c7087dff156\" (UID: \"a446bd7c-bf30-4027-b9d9-9c7087dff156\") " Sep 30 12:38:26 crc kubenswrapper[5002]: I0930 12:38:26.052791 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/accc1f38-16e0-4b04-98d4-ebece5b81989-config\") pod \"accc1f38-16e0-4b04-98d4-ebece5b81989\" (UID: \"accc1f38-16e0-4b04-98d4-ebece5b81989\") " Sep 30 12:38:26 crc kubenswrapper[5002]: I0930 12:38:26.053205 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bxtcl\" (UniqueName: \"kubernetes.io/projected/accc1f38-16e0-4b04-98d4-ebece5b81989-kube-api-access-bxtcl\") pod \"accc1f38-16e0-4b04-98d4-ebece5b81989\" (UID: \"accc1f38-16e0-4b04-98d4-ebece5b81989\") " Sep 30 12:38:26 crc kubenswrapper[5002]: I0930 12:38:26.053331 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/accc1f38-16e0-4b04-98d4-ebece5b81989-ovsdbserver-sb\") pod \"accc1f38-16e0-4b04-98d4-ebece5b81989\" (UID: \"accc1f38-16e0-4b04-98d4-ebece5b81989\") " Sep 30 12:38:26 crc kubenswrapper[5002]: I0930 12:38:26.053363 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/accc1f38-16e0-4b04-98d4-ebece5b81989-dns-svc\") pod \"accc1f38-16e0-4b04-98d4-ebece5b81989\" (UID: \"accc1f38-16e0-4b04-98d4-ebece5b81989\") " Sep 30 12:38:26 crc kubenswrapper[5002]: I0930 12:38:26.053379 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/accc1f38-16e0-4b04-98d4-ebece5b81989-ovsdbserver-nb\") pod \"accc1f38-16e0-4b04-98d4-ebece5b81989\" (UID: \"accc1f38-16e0-4b04-98d4-ebece5b81989\") " Sep 30 12:38:26 crc kubenswrapper[5002]: I0930 12:38:26.053548 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/accc1f38-16e0-4b04-98d4-ebece5b81989-dns-swift-storage-0\") pod \"accc1f38-16e0-4b04-98d4-ebece5b81989\" (UID: \"accc1f38-16e0-4b04-98d4-ebece5b81989\") " Sep 30 12:38:26 crc kubenswrapper[5002]: I0930 12:38:26.057807 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/accc1f38-16e0-4b04-98d4-ebece5b81989-kube-api-access-bxtcl" (OuterVolumeSpecName: "kube-api-access-bxtcl") pod "accc1f38-16e0-4b04-98d4-ebece5b81989" (UID: "accc1f38-16e0-4b04-98d4-ebece5b81989"). InnerVolumeSpecName "kube-api-access-bxtcl". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 12:38:26 crc kubenswrapper[5002]: I0930 12:38:26.067718 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-c69f-account-create-hcqbp" event={"ID":"fa09a798-5d72-49bb-adb7-ea553a2f6e74","Type":"ContainerDied","Data":"248c8f47b28b1ee132e015d577f778fb6b9291c5b55b052f7620c85c0d9d1c57"} Sep 30 12:38:26 crc kubenswrapper[5002]: I0930 12:38:26.067738 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-c69f-account-create-hcqbp" Sep 30 12:38:26 crc kubenswrapper[5002]: I0930 12:38:26.067758 5002 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="248c8f47b28b1ee132e015d577f778fb6b9291c5b55b052f7620c85c0d9d1c57" Sep 30 12:38:26 crc kubenswrapper[5002]: I0930 12:38:26.089371 5002 generic.go:334] "Generic (PLEG): container finished" podID="accc1f38-16e0-4b04-98d4-ebece5b81989" containerID="06c4bf846bd93f27fcfc0e1370ee7441dddbd94ac2a871840b022bada23e98fa" exitCode=0 Sep 30 12:38:26 crc kubenswrapper[5002]: I0930 12:38:26.089447 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-75c8ddd69c-zfwrs" event={"ID":"accc1f38-16e0-4b04-98d4-ebece5b81989","Type":"ContainerDied","Data":"06c4bf846bd93f27fcfc0e1370ee7441dddbd94ac2a871840b022bada23e98fa"} Sep 30 12:38:26 crc kubenswrapper[5002]: I0930 12:38:26.089490 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-75c8ddd69c-zfwrs" event={"ID":"accc1f38-16e0-4b04-98d4-ebece5b81989","Type":"ContainerDied","Data":"fb45501af464b9efa21f58883a09a5a3b58fdc97a29ed6df43d4af6051754c69"} Sep 30 12:38:26 crc kubenswrapper[5002]: I0930 12:38:26.089507 5002 scope.go:117] "RemoveContainer" containerID="06c4bf846bd93f27fcfc0e1370ee7441dddbd94ac2a871840b022bada23e98fa" Sep 30 12:38:26 crc kubenswrapper[5002]: I0930 12:38:26.089612 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-75c8ddd69c-zfwrs" Sep 30 12:38:26 crc kubenswrapper[5002]: I0930 12:38:26.112190 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/accc1f38-16e0-4b04-98d4-ebece5b81989-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "accc1f38-16e0-4b04-98d4-ebece5b81989" (UID: "accc1f38-16e0-4b04-98d4-ebece5b81989"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 12:38:26 crc kubenswrapper[5002]: I0930 12:38:26.119825 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a446bd7c-bf30-4027-b9d9-9c7087dff156-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "a446bd7c-bf30-4027-b9d9-9c7087dff156" (UID: "a446bd7c-bf30-4027-b9d9-9c7087dff156"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 12:38:26 crc kubenswrapper[5002]: I0930 12:38:26.120308 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a446bd7c-bf30-4027-b9d9-9c7087dff156-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "a446bd7c-bf30-4027-b9d9-9c7087dff156" (UID: "a446bd7c-bf30-4027-b9d9-9c7087dff156"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 12:38:26 crc kubenswrapper[5002]: I0930 12:38:26.123679 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a446bd7c-bf30-4027-b9d9-9c7087dff156","Type":"ContainerDied","Data":"e40a34f5a65b956efb645cbb07c4e1a560b7f1c6cdea79270016821cbb221783"} Sep 30 12:38:26 crc kubenswrapper[5002]: I0930 12:38:26.123792 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Sep 30 12:38:26 crc kubenswrapper[5002]: I0930 12:38:26.133972 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a446bd7c-bf30-4027-b9d9-9c7087dff156-kube-api-access-qvdkq" (OuterVolumeSpecName: "kube-api-access-qvdkq") pod "a446bd7c-bf30-4027-b9d9-9c7087dff156" (UID: "a446bd7c-bf30-4027-b9d9-9c7087dff156"). InnerVolumeSpecName "kube-api-access-qvdkq". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 12:38:26 crc kubenswrapper[5002]: I0930 12:38:26.135772 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"b5a818b5-30cc-4c21-b7c4-7563b49832eb","Type":"ContainerStarted","Data":"1d692e8391aef15ab74749bf2d7248a8ac7d09bdc2b47477ca2bda011aa1bca2"} Sep 30 12:38:26 crc kubenswrapper[5002]: I0930 12:38:26.136021 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="3a7935aa-3f70-4f2c-94ba-5bc122c7f028" containerName="cinder-scheduler" containerID="cri-o://6ea4672c16802a15d413efd5848fef1f96bf45563ce6122edb91280564f54666" gracePeriod=30 Sep 30 12:38:26 crc kubenswrapper[5002]: I0930 12:38:26.136721 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="3a7935aa-3f70-4f2c-94ba-5bc122c7f028" containerName="probe" containerID="cri-o://32bc3ef204bd519ca25bb5737cf54719111d9cd5f520797285287a8325913608" gracePeriod=30 Sep 30 12:38:26 crc kubenswrapper[5002]: I0930 12:38:26.136940 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a446bd7c-bf30-4027-b9d9-9c7087dff156-scripts" (OuterVolumeSpecName: "scripts") pod "a446bd7c-bf30-4027-b9d9-9c7087dff156" (UID: "a446bd7c-bf30-4027-b9d9-9c7087dff156"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:38:26 crc kubenswrapper[5002]: I0930 12:38:26.139867 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a446bd7c-bf30-4027-b9d9-9c7087dff156-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "a446bd7c-bf30-4027-b9d9-9c7087dff156" (UID: "a446bd7c-bf30-4027-b9d9-9c7087dff156"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:38:26 crc kubenswrapper[5002]: I0930 12:38:26.144580 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a446bd7c-bf30-4027-b9d9-9c7087dff156-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a446bd7c-bf30-4027-b9d9-9c7087dff156" (UID: "a446bd7c-bf30-4027-b9d9-9c7087dff156"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:38:26 crc kubenswrapper[5002]: I0930 12:38:26.156562 5002 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a446bd7c-bf30-4027-b9d9-9c7087dff156-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 12:38:26 crc kubenswrapper[5002]: I0930 12:38:26.156595 5002 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a446bd7c-bf30-4027-b9d9-9c7087dff156-log-httpd\") on node \"crc\" DevicePath \"\"" Sep 30 12:38:26 crc kubenswrapper[5002]: I0930 12:38:26.156606 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bxtcl\" (UniqueName: \"kubernetes.io/projected/accc1f38-16e0-4b04-98d4-ebece5b81989-kube-api-access-bxtcl\") on node \"crc\" DevicePath \"\"" Sep 30 12:38:26 crc kubenswrapper[5002]: I0930 12:38:26.156617 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qvdkq\" (UniqueName: \"kubernetes.io/projected/a446bd7c-bf30-4027-b9d9-9c7087dff156-kube-api-access-qvdkq\") on node \"crc\" DevicePath \"\"" Sep 30 12:38:26 crc kubenswrapper[5002]: I0930 12:38:26.156629 5002 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a446bd7c-bf30-4027-b9d9-9c7087dff156-run-httpd\") on node \"crc\" DevicePath \"\"" Sep 30 12:38:26 crc kubenswrapper[5002]: I0930 12:38:26.156641 5002 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/accc1f38-16e0-4b04-98d4-ebece5b81989-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 30 12:38:26 crc kubenswrapper[5002]: I0930 12:38:26.156651 5002 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a446bd7c-bf30-4027-b9d9-9c7087dff156-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 12:38:26 crc kubenswrapper[5002]: I0930 12:38:26.156662 5002 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a446bd7c-bf30-4027-b9d9-9c7087dff156-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Sep 30 12:38:26 crc kubenswrapper[5002]: I0930 12:38:26.221759 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/accc1f38-16e0-4b04-98d4-ebece5b81989-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "accc1f38-16e0-4b04-98d4-ebece5b81989" (UID: "accc1f38-16e0-4b04-98d4-ebece5b81989"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 12:38:26 crc kubenswrapper[5002]: I0930 12:38:26.225934 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/accc1f38-16e0-4b04-98d4-ebece5b81989-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "accc1f38-16e0-4b04-98d4-ebece5b81989" (UID: "accc1f38-16e0-4b04-98d4-ebece5b81989"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 12:38:26 crc kubenswrapper[5002]: I0930 12:38:26.230261 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/accc1f38-16e0-4b04-98d4-ebece5b81989-config" (OuterVolumeSpecName: "config") pod "accc1f38-16e0-4b04-98d4-ebece5b81989" (UID: "accc1f38-16e0-4b04-98d4-ebece5b81989"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 12:38:26 crc kubenswrapper[5002]: I0930 12:38:26.243774 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/accc1f38-16e0-4b04-98d4-ebece5b81989-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "accc1f38-16e0-4b04-98d4-ebece5b81989" (UID: "accc1f38-16e0-4b04-98d4-ebece5b81989"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 12:38:26 crc kubenswrapper[5002]: I0930 12:38:26.250520 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-5c946cd5ff-n7x9t" Sep 30 12:38:26 crc kubenswrapper[5002]: I0930 12:38:26.260342 5002 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/accc1f38-16e0-4b04-98d4-ebece5b81989-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Sep 30 12:38:26 crc kubenswrapper[5002]: I0930 12:38:26.260376 5002 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/accc1f38-16e0-4b04-98d4-ebece5b81989-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Sep 30 12:38:26 crc kubenswrapper[5002]: I0930 12:38:26.260388 5002 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/accc1f38-16e0-4b04-98d4-ebece5b81989-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Sep 30 12:38:26 crc kubenswrapper[5002]: I0930 12:38:26.260402 5002 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/accc1f38-16e0-4b04-98d4-ebece5b81989-config\") on node \"crc\" DevicePath \"\"" Sep 30 12:38:26 crc kubenswrapper[5002]: I0930 12:38:26.268692 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a446bd7c-bf30-4027-b9d9-9c7087dff156-config-data" (OuterVolumeSpecName: "config-data") pod "a446bd7c-bf30-4027-b9d9-9c7087dff156" (UID: "a446bd7c-bf30-4027-b9d9-9c7087dff156"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:38:26 crc kubenswrapper[5002]: I0930 12:38:26.365156 5002 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a446bd7c-bf30-4027-b9d9-9c7087dff156-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 12:38:26 crc kubenswrapper[5002]: I0930 12:38:26.368710 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-784489f99d-nqwcr"] Sep 30 12:38:26 crc kubenswrapper[5002]: I0930 12:38:26.369023 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-784489f99d-nqwcr" podUID="10afbe7c-91df-4843-96b2-ad180a2b9cd1" containerName="neutron-api" containerID="cri-o://f93a8c90672f5396b7f993dea6d68a3053142b40633703b7b6d3a573be0d8c22" gracePeriod=30 Sep 30 12:38:26 crc kubenswrapper[5002]: I0930 12:38:26.369119 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-784489f99d-nqwcr" podUID="10afbe7c-91df-4843-96b2-ad180a2b9cd1" containerName="neutron-httpd" containerID="cri-o://6d26ffb59640b1d3e6ca14e606cdfaa97dc7ecba65a631be48df0d924d0febc3" gracePeriod=30 Sep 30 12:38:26 crc kubenswrapper[5002]: I0930 12:38:26.389217 5002 scope.go:117] "RemoveContainer" containerID="d249b247b3be252d97cbcd924a57ebbd24c8000d9a6076ed1301b73b667f7218" Sep 30 12:38:26 crc kubenswrapper[5002]: I0930 12:38:26.432554 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-75c8ddd69c-zfwrs"] Sep 30 12:38:26 crc kubenswrapper[5002]: I0930 12:38:26.445620 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-75c8ddd69c-zfwrs"] Sep 30 12:38:26 crc kubenswrapper[5002]: I0930 12:38:26.462522 5002 scope.go:117] "RemoveContainer" containerID="06c4bf846bd93f27fcfc0e1370ee7441dddbd94ac2a871840b022bada23e98fa" Sep 30 12:38:26 crc kubenswrapper[5002]: E0930 12:38:26.462928 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"06c4bf846bd93f27fcfc0e1370ee7441dddbd94ac2a871840b022bada23e98fa\": container with ID starting with 06c4bf846bd93f27fcfc0e1370ee7441dddbd94ac2a871840b022bada23e98fa not found: ID does not exist" containerID="06c4bf846bd93f27fcfc0e1370ee7441dddbd94ac2a871840b022bada23e98fa" Sep 30 12:38:26 crc kubenswrapper[5002]: I0930 12:38:26.462952 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"06c4bf846bd93f27fcfc0e1370ee7441dddbd94ac2a871840b022bada23e98fa"} err="failed to get container status \"06c4bf846bd93f27fcfc0e1370ee7441dddbd94ac2a871840b022bada23e98fa\": rpc error: code = NotFound desc = could not find container \"06c4bf846bd93f27fcfc0e1370ee7441dddbd94ac2a871840b022bada23e98fa\": container with ID starting with 06c4bf846bd93f27fcfc0e1370ee7441dddbd94ac2a871840b022bada23e98fa not found: ID does not exist" Sep 30 12:38:26 crc kubenswrapper[5002]: I0930 12:38:26.462972 5002 scope.go:117] "RemoveContainer" containerID="d249b247b3be252d97cbcd924a57ebbd24c8000d9a6076ed1301b73b667f7218" Sep 30 12:38:26 crc kubenswrapper[5002]: E0930 12:38:26.463436 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d249b247b3be252d97cbcd924a57ebbd24c8000d9a6076ed1301b73b667f7218\": container with ID starting with d249b247b3be252d97cbcd924a57ebbd24c8000d9a6076ed1301b73b667f7218 not found: ID does not exist" 
containerID="d249b247b3be252d97cbcd924a57ebbd24c8000d9a6076ed1301b73b667f7218" Sep 30 12:38:26 crc kubenswrapper[5002]: I0930 12:38:26.463453 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d249b247b3be252d97cbcd924a57ebbd24c8000d9a6076ed1301b73b667f7218"} err="failed to get container status \"d249b247b3be252d97cbcd924a57ebbd24c8000d9a6076ed1301b73b667f7218\": rpc error: code = NotFound desc = could not find container \"d249b247b3be252d97cbcd924a57ebbd24c8000d9a6076ed1301b73b667f7218\": container with ID starting with d249b247b3be252d97cbcd924a57ebbd24c8000d9a6076ed1301b73b667f7218 not found: ID does not exist" Sep 30 12:38:26 crc kubenswrapper[5002]: I0930 12:38:26.463484 5002 scope.go:117] "RemoveContainer" containerID="0760716e035976c3070797a34595f5cfedda6594d61778a653840fc57fa39fa8" Sep 30 12:38:26 crc kubenswrapper[5002]: I0930 12:38:26.467275 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Sep 30 12:38:26 crc kubenswrapper[5002]: I0930 12:38:26.479121 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 30 12:38:26 crc kubenswrapper[5002]: I0930 12:38:26.503679 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Sep 30 12:38:26 crc kubenswrapper[5002]: I0930 12:38:26.521041 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Sep 30 12:38:26 crc kubenswrapper[5002]: E0930 12:38:26.526228 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="accc1f38-16e0-4b04-98d4-ebece5b81989" containerName="dnsmasq-dns" Sep 30 12:38:26 crc kubenswrapper[5002]: I0930 12:38:26.526259 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="accc1f38-16e0-4b04-98d4-ebece5b81989" containerName="dnsmasq-dns" Sep 30 12:38:26 crc kubenswrapper[5002]: E0930 12:38:26.526288 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a446bd7c-bf30-4027-b9d9-9c7087dff156" containerName="ceilometer-central-agent" Sep 30 12:38:26 crc kubenswrapper[5002]: I0930 12:38:26.526296 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="a446bd7c-bf30-4027-b9d9-9c7087dff156" containerName="ceilometer-central-agent" Sep 30 12:38:26 crc kubenswrapper[5002]: E0930 12:38:26.526307 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a446bd7c-bf30-4027-b9d9-9c7087dff156" containerName="sg-core" Sep 30 12:38:26 crc kubenswrapper[5002]: I0930 12:38:26.526313 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="a446bd7c-bf30-4027-b9d9-9c7087dff156" containerName="sg-core" Sep 30 12:38:26 crc kubenswrapper[5002]: E0930 12:38:26.526324 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a446bd7c-bf30-4027-b9d9-9c7087dff156" containerName="proxy-httpd" Sep 30 12:38:26 crc kubenswrapper[5002]: I0930 12:38:26.526330 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="a446bd7c-bf30-4027-b9d9-9c7087dff156" containerName="proxy-httpd" Sep 30 12:38:26 crc kubenswrapper[5002]: E0930 12:38:26.526344 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a446bd7c-bf30-4027-b9d9-9c7087dff156" containerName="ceilometer-notification-agent" Sep 30 12:38:26 crc kubenswrapper[5002]: I0930 12:38:26.526350 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="a446bd7c-bf30-4027-b9d9-9c7087dff156" containerName="ceilometer-notification-agent" Sep 30 12:38:26 crc kubenswrapper[5002]: E0930 12:38:26.526358 5002 cpu_manager.go:410] 
"RemoveStaleState: removing container" podUID="accc1f38-16e0-4b04-98d4-ebece5b81989" containerName="init" Sep 30 12:38:26 crc kubenswrapper[5002]: I0930 12:38:26.526364 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="accc1f38-16e0-4b04-98d4-ebece5b81989" containerName="init" Sep 30 12:38:26 crc kubenswrapper[5002]: I0930 12:38:26.526575 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="a446bd7c-bf30-4027-b9d9-9c7087dff156" containerName="ceilometer-notification-agent" Sep 30 12:38:26 crc kubenswrapper[5002]: I0930 12:38:26.526591 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="a446bd7c-bf30-4027-b9d9-9c7087dff156" containerName="sg-core" Sep 30 12:38:26 crc kubenswrapper[5002]: I0930 12:38:26.526607 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="a446bd7c-bf30-4027-b9d9-9c7087dff156" containerName="ceilometer-central-agent" Sep 30 12:38:26 crc kubenswrapper[5002]: I0930 12:38:26.526614 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="accc1f38-16e0-4b04-98d4-ebece5b81989" containerName="dnsmasq-dns" Sep 30 12:38:26 crc kubenswrapper[5002]: I0930 12:38:26.526628 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="a446bd7c-bf30-4027-b9d9-9c7087dff156" containerName="proxy-httpd" Sep 30 12:38:26 crc kubenswrapper[5002]: I0930 12:38:26.528276 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 30 12:38:26 crc kubenswrapper[5002]: I0930 12:38:26.533765 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Sep 30 12:38:26 crc kubenswrapper[5002]: I0930 12:38:26.533933 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Sep 30 12:38:26 crc kubenswrapper[5002]: I0930 12:38:26.534629 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 30 12:38:26 crc kubenswrapper[5002]: I0930 12:38:26.624580 5002 scope.go:117] "RemoveContainer" containerID="61f2ba70d12da07efab04d7b5795905b7b1e32bbd84eccb5ae1c512962be419b" Sep 30 12:38:26 crc kubenswrapper[5002]: I0930 12:38:26.669385 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ff36d6c9-849a-4c71-a8e2-8dbae8a96d3b-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"ff36d6c9-849a-4c71-a8e2-8dbae8a96d3b\") " pod="openstack/ceilometer-0" Sep 30 12:38:26 crc kubenswrapper[5002]: I0930 12:38:26.669610 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ff36d6c9-849a-4c71-a8e2-8dbae8a96d3b-log-httpd\") pod \"ceilometer-0\" (UID: \"ff36d6c9-849a-4c71-a8e2-8dbae8a96d3b\") " pod="openstack/ceilometer-0" Sep 30 12:38:26 crc kubenswrapper[5002]: I0930 12:38:26.669772 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ff36d6c9-849a-4c71-a8e2-8dbae8a96d3b-scripts\") pod \"ceilometer-0\" (UID: \"ff36d6c9-849a-4c71-a8e2-8dbae8a96d3b\") " pod="openstack/ceilometer-0" Sep 30 12:38:26 crc kubenswrapper[5002]: I0930 12:38:26.669852 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/ff36d6c9-849a-4c71-a8e2-8dbae8a96d3b-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: 
\"ff36d6c9-849a-4c71-a8e2-8dbae8a96d3b\") " pod="openstack/ceilometer-0" Sep 30 12:38:26 crc kubenswrapper[5002]: I0930 12:38:26.669986 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ff36d6c9-849a-4c71-a8e2-8dbae8a96d3b-run-httpd\") pod \"ceilometer-0\" (UID: \"ff36d6c9-849a-4c71-a8e2-8dbae8a96d3b\") " pod="openstack/ceilometer-0" Sep 30 12:38:26 crc kubenswrapper[5002]: I0930 12:38:26.670037 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ff36d6c9-849a-4c71-a8e2-8dbae8a96d3b-config-data\") pod \"ceilometer-0\" (UID: \"ff36d6c9-849a-4c71-a8e2-8dbae8a96d3b\") " pod="openstack/ceilometer-0" Sep 30 12:38:26 crc kubenswrapper[5002]: I0930 12:38:26.670085 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dtjlq\" (UniqueName: \"kubernetes.io/projected/ff36d6c9-849a-4c71-a8e2-8dbae8a96d3b-kube-api-access-dtjlq\") pod \"ceilometer-0\" (UID: \"ff36d6c9-849a-4c71-a8e2-8dbae8a96d3b\") " pod="openstack/ceilometer-0" Sep 30 12:38:26 crc kubenswrapper[5002]: I0930 12:38:26.672023 5002 scope.go:117] "RemoveContainer" containerID="34e4053fbd3e8b9d086d98db68f823df332fd0c31283a755880f78d523756a4c" Sep 30 12:38:26 crc kubenswrapper[5002]: I0930 12:38:26.740713 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a39b35d6-60c5-4ceb-b46e-9a00daf421ce" path="/var/lib/kubelet/pods/a39b35d6-60c5-4ceb-b46e-9a00daf421ce/volumes" Sep 30 12:38:26 crc kubenswrapper[5002]: I0930 12:38:26.741595 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a446bd7c-bf30-4027-b9d9-9c7087dff156" path="/var/lib/kubelet/pods/a446bd7c-bf30-4027-b9d9-9c7087dff156/volumes" Sep 30 12:38:26 crc kubenswrapper[5002]: I0930 12:38:26.742813 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="accc1f38-16e0-4b04-98d4-ebece5b81989" path="/var/lib/kubelet/pods/accc1f38-16e0-4b04-98d4-ebece5b81989/volumes" Sep 30 12:38:26 crc kubenswrapper[5002]: I0930 12:38:26.744285 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-f016-account-create-g57kg" Sep 30 12:38:26 crc kubenswrapper[5002]: I0930 12:38:26.755088 5002 scope.go:117] "RemoveContainer" containerID="8f5325aab6e4fa4343a483237b57a41a4f3025769f3bf19b8f0b56156e4b5d9c" Sep 30 12:38:26 crc kubenswrapper[5002]: I0930 12:38:26.772854 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ff36d6c9-849a-4c71-a8e2-8dbae8a96d3b-scripts\") pod \"ceilometer-0\" (UID: \"ff36d6c9-849a-4c71-a8e2-8dbae8a96d3b\") " pod="openstack/ceilometer-0" Sep 30 12:38:26 crc kubenswrapper[5002]: I0930 12:38:26.772903 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/ff36d6c9-849a-4c71-a8e2-8dbae8a96d3b-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"ff36d6c9-849a-4c71-a8e2-8dbae8a96d3b\") " pod="openstack/ceilometer-0" Sep 30 12:38:26 crc kubenswrapper[5002]: I0930 12:38:26.772955 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ff36d6c9-849a-4c71-a8e2-8dbae8a96d3b-run-httpd\") pod \"ceilometer-0\" (UID: \"ff36d6c9-849a-4c71-a8e2-8dbae8a96d3b\") " pod="openstack/ceilometer-0" Sep 30 12:38:26 crc kubenswrapper[5002]: I0930 12:38:26.772984 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ff36d6c9-849a-4c71-a8e2-8dbae8a96d3b-config-data\") pod \"ceilometer-0\" (UID: \"ff36d6c9-849a-4c71-a8e2-8dbae8a96d3b\") " pod="openstack/ceilometer-0" Sep 30 12:38:26 crc kubenswrapper[5002]: I0930 12:38:26.773002 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dtjlq\" (UniqueName: \"kubernetes.io/projected/ff36d6c9-849a-4c71-a8e2-8dbae8a96d3b-kube-api-access-dtjlq\") pod \"ceilometer-0\" (UID: \"ff36d6c9-849a-4c71-a8e2-8dbae8a96d3b\") " pod="openstack/ceilometer-0" Sep 30 12:38:26 crc kubenswrapper[5002]: I0930 12:38:26.773039 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ff36d6c9-849a-4c71-a8e2-8dbae8a96d3b-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"ff36d6c9-849a-4c71-a8e2-8dbae8a96d3b\") " pod="openstack/ceilometer-0" Sep 30 12:38:26 crc kubenswrapper[5002]: I0930 12:38:26.773088 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ff36d6c9-849a-4c71-a8e2-8dbae8a96d3b-log-httpd\") pod \"ceilometer-0\" (UID: \"ff36d6c9-849a-4c71-a8e2-8dbae8a96d3b\") " pod="openstack/ceilometer-0" Sep 30 12:38:26 crc kubenswrapper[5002]: I0930 12:38:26.773413 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ff36d6c9-849a-4c71-a8e2-8dbae8a96d3b-log-httpd\") pod \"ceilometer-0\" (UID: \"ff36d6c9-849a-4c71-a8e2-8dbae8a96d3b\") " pod="openstack/ceilometer-0" Sep 30 12:38:26 crc kubenswrapper[5002]: I0930 12:38:26.773749 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ff36d6c9-849a-4c71-a8e2-8dbae8a96d3b-run-httpd\") pod \"ceilometer-0\" (UID: \"ff36d6c9-849a-4c71-a8e2-8dbae8a96d3b\") " pod="openstack/ceilometer-0" Sep 30 12:38:26 crc kubenswrapper[5002]: I0930 12:38:26.777249 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ff36d6c9-849a-4c71-a8e2-8dbae8a96d3b-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"ff36d6c9-849a-4c71-a8e2-8dbae8a96d3b\") " pod="openstack/ceilometer-0" Sep 30 12:38:26 crc kubenswrapper[5002]: I0930 12:38:26.777335 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Sep 30 12:38:26 crc kubenswrapper[5002]: I0930 12:38:26.777349 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Sep 30 12:38:26 crc kubenswrapper[5002]: I0930 12:38:26.778603 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-89b0-account-create-qsmbt" Sep 30 12:38:26 crc kubenswrapper[5002]: I0930 12:38:26.793278 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ff36d6c9-849a-4c71-a8e2-8dbae8a96d3b-scripts\") pod \"ceilometer-0\" (UID: \"ff36d6c9-849a-4c71-a8e2-8dbae8a96d3b\") " pod="openstack/ceilometer-0" Sep 30 12:38:26 crc kubenswrapper[5002]: I0930 12:38:26.793805 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ff36d6c9-849a-4c71-a8e2-8dbae8a96d3b-config-data\") pod \"ceilometer-0\" (UID: \"ff36d6c9-849a-4c71-a8e2-8dbae8a96d3b\") " pod="openstack/ceilometer-0" Sep 30 12:38:26 crc kubenswrapper[5002]: I0930 12:38:26.795007 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/ff36d6c9-849a-4c71-a8e2-8dbae8a96d3b-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"ff36d6c9-849a-4c71-a8e2-8dbae8a96d3b\") " pod="openstack/ceilometer-0" Sep 30 12:38:26 crc kubenswrapper[5002]: I0930 12:38:26.796882 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dtjlq\" (UniqueName: \"kubernetes.io/projected/ff36d6c9-849a-4c71-a8e2-8dbae8a96d3b-kube-api-access-dtjlq\") pod \"ceilometer-0\" (UID: \"ff36d6c9-849a-4c71-a8e2-8dbae8a96d3b\") " pod="openstack/ceilometer-0" Sep 30 12:38:26 crc kubenswrapper[5002]: I0930 12:38:26.871979 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 30 12:38:26 crc kubenswrapper[5002]: I0930 12:38:26.875689 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sd6hx\" (UniqueName: \"kubernetes.io/projected/3fcc312f-04fb-44a6-bfba-f083655d42bb-kube-api-access-sd6hx\") pod \"3fcc312f-04fb-44a6-bfba-f083655d42bb\" (UID: \"3fcc312f-04fb-44a6-bfba-f083655d42bb\") " Sep 30 12:38:26 crc kubenswrapper[5002]: I0930 12:38:26.875918 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-95c6l\" (UniqueName: \"kubernetes.io/projected/a9278220-9445-4f33-a4e4-a2224206b58e-kube-api-access-95c6l\") pod \"a9278220-9445-4f33-a4e4-a2224206b58e\" (UID: \"a9278220-9445-4f33-a4e4-a2224206b58e\") " Sep 30 12:38:26 crc kubenswrapper[5002]: I0930 12:38:26.884022 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a9278220-9445-4f33-a4e4-a2224206b58e-kube-api-access-95c6l" (OuterVolumeSpecName: "kube-api-access-95c6l") pod "a9278220-9445-4f33-a4e4-a2224206b58e" (UID: "a9278220-9445-4f33-a4e4-a2224206b58e"). InnerVolumeSpecName "kube-api-access-95c6l". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 12:38:26 crc kubenswrapper[5002]: I0930 12:38:26.885036 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3fcc312f-04fb-44a6-bfba-f083655d42bb-kube-api-access-sd6hx" (OuterVolumeSpecName: "kube-api-access-sd6hx") pod "3fcc312f-04fb-44a6-bfba-f083655d42bb" (UID: "3fcc312f-04fb-44a6-bfba-f083655d42bb"). InnerVolumeSpecName "kube-api-access-sd6hx". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 12:38:26 crc kubenswrapper[5002]: I0930 12:38:26.978285 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sd6hx\" (UniqueName: \"kubernetes.io/projected/3fcc312f-04fb-44a6-bfba-f083655d42bb-kube-api-access-sd6hx\") on node \"crc\" DevicePath \"\"" Sep 30 12:38:26 crc kubenswrapper[5002]: I0930 12:38:26.978314 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-95c6l\" (UniqueName: \"kubernetes.io/projected/a9278220-9445-4f33-a4e4-a2224206b58e-kube-api-access-95c6l\") on node \"crc\" DevicePath \"\"" Sep 30 12:38:27 crc kubenswrapper[5002]: I0930 12:38:27.167724 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"b5a818b5-30cc-4c21-b7c4-7563b49832eb","Type":"ContainerStarted","Data":"a6e29a42ec65e9e497411b8998b6e38d948606510958bf3a02a3108bf1b926a7"} Sep 30 12:38:27 crc kubenswrapper[5002]: I0930 12:38:27.169558 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"17156d80-5354-430c-a7f9-294bae55a11c","Type":"ContainerStarted","Data":"224b35723015b53106e94943f3481a211b596459317c9491b36269bee567dde7"} Sep 30 12:38:27 crc kubenswrapper[5002]: I0930 12:38:27.175420 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-f016-account-create-g57kg" event={"ID":"a9278220-9445-4f33-a4e4-a2224206b58e","Type":"ContainerDied","Data":"32b93be5a56996fbcddb3a916204b5b1abdaa45f73b627de79f2ca7c0c164e32"} Sep 30 12:38:27 crc kubenswrapper[5002]: I0930 12:38:27.175464 5002 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="32b93be5a56996fbcddb3a916204b5b1abdaa45f73b627de79f2ca7c0c164e32" Sep 30 12:38:27 crc kubenswrapper[5002]: I0930 12:38:27.175552 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-f016-account-create-g57kg" Sep 30 12:38:27 crc kubenswrapper[5002]: I0930 12:38:27.190202 5002 generic.go:334] "Generic (PLEG): container finished" podID="3a7935aa-3f70-4f2c-94ba-5bc122c7f028" containerID="32bc3ef204bd519ca25bb5737cf54719111d9cd5f520797285287a8325913608" exitCode=0 Sep 30 12:38:27 crc kubenswrapper[5002]: I0930 12:38:27.190259 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"3a7935aa-3f70-4f2c-94ba-5bc122c7f028","Type":"ContainerDied","Data":"32bc3ef204bd519ca25bb5737cf54719111d9cd5f520797285287a8325913608"} Sep 30 12:38:27 crc kubenswrapper[5002]: I0930 12:38:27.199755 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-89b0-account-create-qsmbt" event={"ID":"3fcc312f-04fb-44a6-bfba-f083655d42bb","Type":"ContainerDied","Data":"2d9bcfa4127ff13b939d8aaff552511dbe4fc54f4a7ad775979e3443af38b727"} Sep 30 12:38:27 crc kubenswrapper[5002]: I0930 12:38:27.199791 5002 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2d9bcfa4127ff13b939d8aaff552511dbe4fc54f4a7ad775979e3443af38b727" Sep 30 12:38:27 crc kubenswrapper[5002]: I0930 12:38:27.199908 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-89b0-account-create-qsmbt" Sep 30 12:38:27 crc kubenswrapper[5002]: I0930 12:38:27.205755 5002 generic.go:334] "Generic (PLEG): container finished" podID="10afbe7c-91df-4843-96b2-ad180a2b9cd1" containerID="6d26ffb59640b1d3e6ca14e606cdfaa97dc7ecba65a631be48df0d924d0febc3" exitCode=0 Sep 30 12:38:27 crc kubenswrapper[5002]: I0930 12:38:27.205824 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-784489f99d-nqwcr" event={"ID":"10afbe7c-91df-4843-96b2-ad180a2b9cd1","Type":"ContainerDied","Data":"6d26ffb59640b1d3e6ca14e606cdfaa97dc7ecba65a631be48df0d924d0febc3"} Sep 30 12:38:27 crc kubenswrapper[5002]: I0930 12:38:27.334056 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-db-sync-gjsbc"] Sep 30 12:38:27 crc kubenswrapper[5002]: E0930 12:38:27.334705 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a9278220-9445-4f33-a4e4-a2224206b58e" containerName="mariadb-account-create" Sep 30 12:38:27 crc kubenswrapper[5002]: I0930 12:38:27.334727 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="a9278220-9445-4f33-a4e4-a2224206b58e" containerName="mariadb-account-create" Sep 30 12:38:27 crc kubenswrapper[5002]: E0930 12:38:27.334760 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3fcc312f-04fb-44a6-bfba-f083655d42bb" containerName="mariadb-account-create" Sep 30 12:38:27 crc kubenswrapper[5002]: I0930 12:38:27.334767 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="3fcc312f-04fb-44a6-bfba-f083655d42bb" containerName="mariadb-account-create" Sep 30 12:38:27 crc kubenswrapper[5002]: I0930 12:38:27.335011 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="3fcc312f-04fb-44a6-bfba-f083655d42bb" containerName="mariadb-account-create" Sep 30 12:38:27 crc kubenswrapper[5002]: I0930 12:38:27.335047 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="a9278220-9445-4f33-a4e4-a2224206b58e" containerName="mariadb-account-create" Sep 30 12:38:27 crc kubenswrapper[5002]: I0930 12:38:27.335782 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-gjsbc" Sep 30 12:38:27 crc kubenswrapper[5002]: I0930 12:38:27.337417 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-scripts" Sep 30 12:38:27 crc kubenswrapper[5002]: I0930 12:38:27.337780 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Sep 30 12:38:27 crc kubenswrapper[5002]: I0930 12:38:27.340656 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-tdv86" Sep 30 12:38:27 crc kubenswrapper[5002]: I0930 12:38:27.343490 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-gjsbc"] Sep 30 12:38:27 crc kubenswrapper[5002]: I0930 12:38:27.391081 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7c7c28f5-a6b2-4841-ac23-37c9167b3da4-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-gjsbc\" (UID: \"7c7c28f5-a6b2-4841-ac23-37c9167b3da4\") " pod="openstack/nova-cell0-conductor-db-sync-gjsbc" Sep 30 12:38:27 crc kubenswrapper[5002]: I0930 12:38:27.391258 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7c7c28f5-a6b2-4841-ac23-37c9167b3da4-scripts\") pod \"nova-cell0-conductor-db-sync-gjsbc\" (UID: \"7c7c28f5-a6b2-4841-ac23-37c9167b3da4\") " pod="openstack/nova-cell0-conductor-db-sync-gjsbc" Sep 30 12:38:27 crc kubenswrapper[5002]: I0930 12:38:27.391319 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ndpxv\" (UniqueName: \"kubernetes.io/projected/7c7c28f5-a6b2-4841-ac23-37c9167b3da4-kube-api-access-ndpxv\") pod \"nova-cell0-conductor-db-sync-gjsbc\" (UID: \"7c7c28f5-a6b2-4841-ac23-37c9167b3da4\") " pod="openstack/nova-cell0-conductor-db-sync-gjsbc" Sep 30 12:38:27 crc kubenswrapper[5002]: I0930 12:38:27.391362 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7c7c28f5-a6b2-4841-ac23-37c9167b3da4-config-data\") pod \"nova-cell0-conductor-db-sync-gjsbc\" (UID: \"7c7c28f5-a6b2-4841-ac23-37c9167b3da4\") " pod="openstack/nova-cell0-conductor-db-sync-gjsbc" Sep 30 12:38:27 crc kubenswrapper[5002]: I0930 12:38:27.464054 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 30 12:38:27 crc kubenswrapper[5002]: I0930 12:38:27.493029 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7c7c28f5-a6b2-4841-ac23-37c9167b3da4-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-gjsbc\" (UID: \"7c7c28f5-a6b2-4841-ac23-37c9167b3da4\") " pod="openstack/nova-cell0-conductor-db-sync-gjsbc" Sep 30 12:38:27 crc kubenswrapper[5002]: I0930 12:38:27.493135 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7c7c28f5-a6b2-4841-ac23-37c9167b3da4-scripts\") pod \"nova-cell0-conductor-db-sync-gjsbc\" (UID: \"7c7c28f5-a6b2-4841-ac23-37c9167b3da4\") " pod="openstack/nova-cell0-conductor-db-sync-gjsbc" Sep 30 12:38:27 crc kubenswrapper[5002]: I0930 12:38:27.493185 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ndpxv\" 
(UniqueName: \"kubernetes.io/projected/7c7c28f5-a6b2-4841-ac23-37c9167b3da4-kube-api-access-ndpxv\") pod \"nova-cell0-conductor-db-sync-gjsbc\" (UID: \"7c7c28f5-a6b2-4841-ac23-37c9167b3da4\") " pod="openstack/nova-cell0-conductor-db-sync-gjsbc" Sep 30 12:38:27 crc kubenswrapper[5002]: I0930 12:38:27.493221 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7c7c28f5-a6b2-4841-ac23-37c9167b3da4-config-data\") pod \"nova-cell0-conductor-db-sync-gjsbc\" (UID: \"7c7c28f5-a6b2-4841-ac23-37c9167b3da4\") " pod="openstack/nova-cell0-conductor-db-sync-gjsbc" Sep 30 12:38:27 crc kubenswrapper[5002]: I0930 12:38:27.509303 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7c7c28f5-a6b2-4841-ac23-37c9167b3da4-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-gjsbc\" (UID: \"7c7c28f5-a6b2-4841-ac23-37c9167b3da4\") " pod="openstack/nova-cell0-conductor-db-sync-gjsbc" Sep 30 12:38:27 crc kubenswrapper[5002]: I0930 12:38:27.509757 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7c7c28f5-a6b2-4841-ac23-37c9167b3da4-scripts\") pod \"nova-cell0-conductor-db-sync-gjsbc\" (UID: \"7c7c28f5-a6b2-4841-ac23-37c9167b3da4\") " pod="openstack/nova-cell0-conductor-db-sync-gjsbc" Sep 30 12:38:27 crc kubenswrapper[5002]: I0930 12:38:27.514749 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7c7c28f5-a6b2-4841-ac23-37c9167b3da4-config-data\") pod \"nova-cell0-conductor-db-sync-gjsbc\" (UID: \"7c7c28f5-a6b2-4841-ac23-37c9167b3da4\") " pod="openstack/nova-cell0-conductor-db-sync-gjsbc" Sep 30 12:38:27 crc kubenswrapper[5002]: I0930 12:38:27.518600 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ndpxv\" (UniqueName: \"kubernetes.io/projected/7c7c28f5-a6b2-4841-ac23-37c9167b3da4-kube-api-access-ndpxv\") pod \"nova-cell0-conductor-db-sync-gjsbc\" (UID: \"7c7c28f5-a6b2-4841-ac23-37c9167b3da4\") " pod="openstack/nova-cell0-conductor-db-sync-gjsbc" Sep 30 12:38:27 crc kubenswrapper[5002]: I0930 12:38:27.711609 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-gjsbc" Sep 30 12:38:28 crc kubenswrapper[5002]: I0930 12:38:28.227948 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"17156d80-5354-430c-a7f9-294bae55a11c","Type":"ContainerStarted","Data":"65ea4bc4a47fecc2aaf4bd178a04a36ac18712c3edeb13b9e28405ed083a130e"} Sep 30 12:38:28 crc kubenswrapper[5002]: I0930 12:38:28.228548 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"17156d80-5354-430c-a7f9-294bae55a11c","Type":"ContainerStarted","Data":"9386f654c37b19e5e550c236978d94e3e34f6db049ce99365b8f782a00c95123"} Sep 30 12:38:28 crc kubenswrapper[5002]: I0930 12:38:28.230105 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ff36d6c9-849a-4c71-a8e2-8dbae8a96d3b","Type":"ContainerStarted","Data":"7a9444c00bcd06f5df59413cbe726221761d4ea2790ed21d7ef39f27c14d65b7"} Sep 30 12:38:28 crc kubenswrapper[5002]: I0930 12:38:28.233002 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"b5a818b5-30cc-4c21-b7c4-7563b49832eb","Type":"ContainerStarted","Data":"1f3d6fbe8aa1e7f43f279e2a9a7ab4cddfeb5553e0b343f1e38381fd9cec161e"} Sep 30 12:38:28 crc kubenswrapper[5002]: I0930 12:38:28.249956 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=3.249938425 podStartE2EDuration="3.249938425s" podCreationTimestamp="2025-09-30 12:38:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 12:38:28.248434647 +0000 UTC m=+1082.498116803" watchObservedRunningTime="2025-09-30 12:38:28.249938425 +0000 UTC m=+1082.499620581" Sep 30 12:38:28 crc kubenswrapper[5002]: I0930 12:38:28.290372 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-gjsbc"] Sep 30 12:38:28 crc kubenswrapper[5002]: I0930 12:38:28.295432 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=4.295413283 podStartE2EDuration="4.295413283s" podCreationTimestamp="2025-09-30 12:38:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 12:38:28.274448789 +0000 UTC m=+1082.524130945" watchObservedRunningTime="2025-09-30 12:38:28.295413283 +0000 UTC m=+1082.545095429" Sep 30 12:38:29 crc kubenswrapper[5002]: I0930 12:38:29.249900 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ff36d6c9-849a-4c71-a8e2-8dbae8a96d3b","Type":"ContainerStarted","Data":"727889ff5eb86ca44e5dd58da591ec66b48cb4ddbaafb7fb1642911f760ee5d7"} Sep 30 12:38:29 crc kubenswrapper[5002]: I0930 12:38:29.251423 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ff36d6c9-849a-4c71-a8e2-8dbae8a96d3b","Type":"ContainerStarted","Data":"5c58eed796ed91fc7a1b07b4b83224358586cebd93860f4563051790ae2a8231"} Sep 30 12:38:29 crc kubenswrapper[5002]: I0930 12:38:29.251519 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-gjsbc" event={"ID":"7c7c28f5-a6b2-4841-ac23-37c9167b3da4","Type":"ContainerStarted","Data":"157ae5e9fea6a59a9b03f7ef98c16ae08e88f5c31ddcc8c3fff0e78fe4291b1e"} 
Sep 30 12:38:30 crc kubenswrapper[5002]: I0930 12:38:30.268324 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0"
Sep 30 12:38:30 crc kubenswrapper[5002]: I0930 12:38:30.271839 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ff36d6c9-849a-4c71-a8e2-8dbae8a96d3b","Type":"ContainerStarted","Data":"7f7bd99857c3a50d34af2dc8a8459203aab82d232f1ec321254e7c7027126e95"}
Sep 30 12:38:30 crc kubenswrapper[5002]: I0930 12:38:30.275389 5002 generic.go:334] "Generic (PLEG): container finished" podID="3a7935aa-3f70-4f2c-94ba-5bc122c7f028" containerID="6ea4672c16802a15d413efd5848fef1f96bf45563ce6122edb91280564f54666" exitCode=0
Sep 30 12:38:30 crc kubenswrapper[5002]: I0930 12:38:30.275425 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"3a7935aa-3f70-4f2c-94ba-5bc122c7f028","Type":"ContainerDied","Data":"6ea4672c16802a15d413efd5848fef1f96bf45563ce6122edb91280564f54666"}
Sep 30 12:38:30 crc kubenswrapper[5002]: I0930 12:38:30.275448 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"3a7935aa-3f70-4f2c-94ba-5bc122c7f028","Type":"ContainerDied","Data":"abfc820957916df1b7df28480afdd8bdfb1518f68af932246ed74e63441cc87a"}
Sep 30 12:38:30 crc kubenswrapper[5002]: I0930 12:38:30.275465 5002 scope.go:117] "RemoveContainer" containerID="32bc3ef204bd519ca25bb5737cf54719111d9cd5f520797285287a8325913608"
Sep 30 12:38:30 crc kubenswrapper[5002]: I0930 12:38:30.275605 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0"
Sep 30 12:38:30 crc kubenswrapper[5002]: I0930 12:38:30.350054 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3a7935aa-3f70-4f2c-94ba-5bc122c7f028-combined-ca-bundle\") pod \"3a7935aa-3f70-4f2c-94ba-5bc122c7f028\" (UID: \"3a7935aa-3f70-4f2c-94ba-5bc122c7f028\") "
Sep 30 12:38:30 crc kubenswrapper[5002]: I0930 12:38:30.350120 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3a7935aa-3f70-4f2c-94ba-5bc122c7f028-scripts\") pod \"3a7935aa-3f70-4f2c-94ba-5bc122c7f028\" (UID: \"3a7935aa-3f70-4f2c-94ba-5bc122c7f028\") "
Sep 30 12:38:30 crc kubenswrapper[5002]: I0930 12:38:30.350181 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mfdb7\" (UniqueName: \"kubernetes.io/projected/3a7935aa-3f70-4f2c-94ba-5bc122c7f028-kube-api-access-mfdb7\") pod \"3a7935aa-3f70-4f2c-94ba-5bc122c7f028\" (UID: \"3a7935aa-3f70-4f2c-94ba-5bc122c7f028\") "
Sep 30 12:38:30 crc kubenswrapper[5002]: I0930 12:38:30.350210 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3a7935aa-3f70-4f2c-94ba-5bc122c7f028-config-data\") pod \"3a7935aa-3f70-4f2c-94ba-5bc122c7f028\" (UID: \"3a7935aa-3f70-4f2c-94ba-5bc122c7f028\") "
Sep 30 12:38:30 crc kubenswrapper[5002]: I0930 12:38:30.350252 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/3a7935aa-3f70-4f2c-94ba-5bc122c7f028-config-data-custom\") pod \"3a7935aa-3f70-4f2c-94ba-5bc122c7f028\" (UID: \"3a7935aa-3f70-4f2c-94ba-5bc122c7f028\") "
Sep 30 12:38:30 crc kubenswrapper[5002]: I0930 12:38:30.350300 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/3a7935aa-3f70-4f2c-94ba-5bc122c7f028-etc-machine-id\") pod \"3a7935aa-3f70-4f2c-94ba-5bc122c7f028\" (UID: \"3a7935aa-3f70-4f2c-94ba-5bc122c7f028\") "
Sep 30 12:38:30 crc kubenswrapper[5002]: I0930 12:38:30.350684 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/3a7935aa-3f70-4f2c-94ba-5bc122c7f028-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "3a7935aa-3f70-4f2c-94ba-5bc122c7f028" (UID: "3a7935aa-3f70-4f2c-94ba-5bc122c7f028"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Sep 30 12:38:30 crc kubenswrapper[5002]: I0930 12:38:30.359560 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3a7935aa-3f70-4f2c-94ba-5bc122c7f028-scripts" (OuterVolumeSpecName: "scripts") pod "3a7935aa-3f70-4f2c-94ba-5bc122c7f028" (UID: "3a7935aa-3f70-4f2c-94ba-5bc122c7f028"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 12:38:30 crc kubenswrapper[5002]: I0930 12:38:30.371646 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3a7935aa-3f70-4f2c-94ba-5bc122c7f028-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "3a7935aa-3f70-4f2c-94ba-5bc122c7f028" (UID: "3a7935aa-3f70-4f2c-94ba-5bc122c7f028"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 12:38:30 crc kubenswrapper[5002]: I0930 12:38:30.393750 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3a7935aa-3f70-4f2c-94ba-5bc122c7f028-kube-api-access-mfdb7" (OuterVolumeSpecName: "kube-api-access-mfdb7") pod "3a7935aa-3f70-4f2c-94ba-5bc122c7f028" (UID: "3a7935aa-3f70-4f2c-94ba-5bc122c7f028"). InnerVolumeSpecName "kube-api-access-mfdb7". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 12:38:30 crc kubenswrapper[5002]: I0930 12:38:30.424878 5002 scope.go:117] "RemoveContainer" containerID="6ea4672c16802a15d413efd5848fef1f96bf45563ce6122edb91280564f54666"
Sep 30 12:38:30 crc kubenswrapper[5002]: I0930 12:38:30.452213 5002 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3a7935aa-3f70-4f2c-94ba-5bc122c7f028-scripts\") on node \"crc\" DevicePath \"\""
Sep 30 12:38:30 crc kubenswrapper[5002]: I0930 12:38:30.452243 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mfdb7\" (UniqueName: \"kubernetes.io/projected/3a7935aa-3f70-4f2c-94ba-5bc122c7f028-kube-api-access-mfdb7\") on node \"crc\" DevicePath \"\""
Sep 30 12:38:30 crc kubenswrapper[5002]: I0930 12:38:30.452257 5002 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/3a7935aa-3f70-4f2c-94ba-5bc122c7f028-config-data-custom\") on node \"crc\" DevicePath \"\""
Sep 30 12:38:30 crc kubenswrapper[5002]: I0930 12:38:30.452267 5002 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/3a7935aa-3f70-4f2c-94ba-5bc122c7f028-etc-machine-id\") on node \"crc\" DevicePath \"\""
Sep 30 12:38:30 crc kubenswrapper[5002]: I0930 12:38:30.478711 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3a7935aa-3f70-4f2c-94ba-5bc122c7f028-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "3a7935aa-3f70-4f2c-94ba-5bc122c7f028" (UID: "3a7935aa-3f70-4f2c-94ba-5bc122c7f028"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 12:38:30 crc kubenswrapper[5002]: I0930 12:38:30.554457 5002 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3a7935aa-3f70-4f2c-94ba-5bc122c7f028-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Sep 30 12:38:30 crc kubenswrapper[5002]: I0930 12:38:30.565182 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3a7935aa-3f70-4f2c-94ba-5bc122c7f028-config-data" (OuterVolumeSpecName: "config-data") pod "3a7935aa-3f70-4f2c-94ba-5bc122c7f028" (UID: "3a7935aa-3f70-4f2c-94ba-5bc122c7f028"). InnerVolumeSpecName "config-data".
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:38:30 crc kubenswrapper[5002]: I0930 12:38:30.577644 5002 scope.go:117] "RemoveContainer" containerID="32bc3ef204bd519ca25bb5737cf54719111d9cd5f520797285287a8325913608" Sep 30 12:38:30 crc kubenswrapper[5002]: E0930 12:38:30.578210 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"32bc3ef204bd519ca25bb5737cf54719111d9cd5f520797285287a8325913608\": container with ID starting with 32bc3ef204bd519ca25bb5737cf54719111d9cd5f520797285287a8325913608 not found: ID does not exist" containerID="32bc3ef204bd519ca25bb5737cf54719111d9cd5f520797285287a8325913608" Sep 30 12:38:30 crc kubenswrapper[5002]: I0930 12:38:30.578250 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"32bc3ef204bd519ca25bb5737cf54719111d9cd5f520797285287a8325913608"} err="failed to get container status \"32bc3ef204bd519ca25bb5737cf54719111d9cd5f520797285287a8325913608\": rpc error: code = NotFound desc = could not find container \"32bc3ef204bd519ca25bb5737cf54719111d9cd5f520797285287a8325913608\": container with ID starting with 32bc3ef204bd519ca25bb5737cf54719111d9cd5f520797285287a8325913608 not found: ID does not exist" Sep 30 12:38:30 crc kubenswrapper[5002]: I0930 12:38:30.578274 5002 scope.go:117] "RemoveContainer" containerID="6ea4672c16802a15d413efd5848fef1f96bf45563ce6122edb91280564f54666" Sep 30 12:38:30 crc kubenswrapper[5002]: E0930 12:38:30.578721 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6ea4672c16802a15d413efd5848fef1f96bf45563ce6122edb91280564f54666\": container with ID starting with 6ea4672c16802a15d413efd5848fef1f96bf45563ce6122edb91280564f54666 not found: ID does not exist" containerID="6ea4672c16802a15d413efd5848fef1f96bf45563ce6122edb91280564f54666" Sep 30 12:38:30 crc kubenswrapper[5002]: I0930 12:38:30.578759 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6ea4672c16802a15d413efd5848fef1f96bf45563ce6122edb91280564f54666"} err="failed to get container status \"6ea4672c16802a15d413efd5848fef1f96bf45563ce6122edb91280564f54666\": rpc error: code = NotFound desc = could not find container \"6ea4672c16802a15d413efd5848fef1f96bf45563ce6122edb91280564f54666\": container with ID starting with 6ea4672c16802a15d413efd5848fef1f96bf45563ce6122edb91280564f54666 not found: ID does not exist" Sep 30 12:38:30 crc kubenswrapper[5002]: I0930 12:38:30.612422 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Sep 30 12:38:30 crc kubenswrapper[5002]: I0930 12:38:30.624203 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-scheduler-0"] Sep 30 12:38:30 crc kubenswrapper[5002]: I0930 12:38:30.644374 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"] Sep 30 12:38:30 crc kubenswrapper[5002]: E0930 12:38:30.644921 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3a7935aa-3f70-4f2c-94ba-5bc122c7f028" containerName="cinder-scheduler" Sep 30 12:38:30 crc kubenswrapper[5002]: I0930 12:38:30.644983 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="3a7935aa-3f70-4f2c-94ba-5bc122c7f028" containerName="cinder-scheduler" Sep 30 12:38:30 crc kubenswrapper[5002]: E0930 12:38:30.645058 5002 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="3a7935aa-3f70-4f2c-94ba-5bc122c7f028" containerName="probe" Sep 30 12:38:30 crc kubenswrapper[5002]: I0930 12:38:30.645106 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="3a7935aa-3f70-4f2c-94ba-5bc122c7f028" containerName="probe" Sep 30 12:38:30 crc kubenswrapper[5002]: I0930 12:38:30.645311 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="3a7935aa-3f70-4f2c-94ba-5bc122c7f028" containerName="cinder-scheduler" Sep 30 12:38:30 crc kubenswrapper[5002]: I0930 12:38:30.645393 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="3a7935aa-3f70-4f2c-94ba-5bc122c7f028" containerName="probe" Sep 30 12:38:30 crc kubenswrapper[5002]: I0930 12:38:30.646385 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Sep 30 12:38:30 crc kubenswrapper[5002]: I0930 12:38:30.648494 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data" Sep 30 12:38:30 crc kubenswrapper[5002]: I0930 12:38:30.656647 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/9504c15f-ff11-4255-9371-b0481f692c0b-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"9504c15f-ff11-4255-9371-b0481f692c0b\") " pod="openstack/cinder-scheduler-0" Sep 30 12:38:30 crc kubenswrapper[5002]: I0930 12:38:30.656711 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9504c15f-ff11-4255-9371-b0481f692c0b-scripts\") pod \"cinder-scheduler-0\" (UID: \"9504c15f-ff11-4255-9371-b0481f692c0b\") " pod="openstack/cinder-scheduler-0" Sep 30 12:38:30 crc kubenswrapper[5002]: I0930 12:38:30.656786 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9504c15f-ff11-4255-9371-b0481f692c0b-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"9504c15f-ff11-4255-9371-b0481f692c0b\") " pod="openstack/cinder-scheduler-0" Sep 30 12:38:30 crc kubenswrapper[5002]: I0930 12:38:30.656860 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9504c15f-ff11-4255-9371-b0481f692c0b-config-data\") pod \"cinder-scheduler-0\" (UID: \"9504c15f-ff11-4255-9371-b0481f692c0b\") " pod="openstack/cinder-scheduler-0" Sep 30 12:38:30 crc kubenswrapper[5002]: I0930 12:38:30.656911 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9504c15f-ff11-4255-9371-b0481f692c0b-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"9504c15f-ff11-4255-9371-b0481f692c0b\") " pod="openstack/cinder-scheduler-0" Sep 30 12:38:30 crc kubenswrapper[5002]: I0930 12:38:30.659262 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lvqfl\" (UniqueName: \"kubernetes.io/projected/9504c15f-ff11-4255-9371-b0481f692c0b-kube-api-access-lvqfl\") pod \"cinder-scheduler-0\" (UID: \"9504c15f-ff11-4255-9371-b0481f692c0b\") " pod="openstack/cinder-scheduler-0" Sep 30 12:38:30 crc kubenswrapper[5002]: I0930 12:38:30.659417 5002 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3a7935aa-3f70-4f2c-94ba-5bc122c7f028-config-data\") on node 
\"crc\" DevicePath \"\"" Sep 30 12:38:30 crc kubenswrapper[5002]: I0930 12:38:30.664962 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Sep 30 12:38:30 crc kubenswrapper[5002]: I0930 12:38:30.736301 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3a7935aa-3f70-4f2c-94ba-5bc122c7f028" path="/var/lib/kubelet/pods/3a7935aa-3f70-4f2c-94ba-5bc122c7f028/volumes" Sep 30 12:38:30 crc kubenswrapper[5002]: I0930 12:38:30.761342 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9504c15f-ff11-4255-9371-b0481f692c0b-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"9504c15f-ff11-4255-9371-b0481f692c0b\") " pod="openstack/cinder-scheduler-0" Sep 30 12:38:30 crc kubenswrapper[5002]: I0930 12:38:30.761452 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9504c15f-ff11-4255-9371-b0481f692c0b-config-data\") pod \"cinder-scheduler-0\" (UID: \"9504c15f-ff11-4255-9371-b0481f692c0b\") " pod="openstack/cinder-scheduler-0" Sep 30 12:38:30 crc kubenswrapper[5002]: I0930 12:38:30.761515 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9504c15f-ff11-4255-9371-b0481f692c0b-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"9504c15f-ff11-4255-9371-b0481f692c0b\") " pod="openstack/cinder-scheduler-0" Sep 30 12:38:30 crc kubenswrapper[5002]: I0930 12:38:30.761546 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lvqfl\" (UniqueName: \"kubernetes.io/projected/9504c15f-ff11-4255-9371-b0481f692c0b-kube-api-access-lvqfl\") pod \"cinder-scheduler-0\" (UID: \"9504c15f-ff11-4255-9371-b0481f692c0b\") " pod="openstack/cinder-scheduler-0" Sep 30 12:38:30 crc kubenswrapper[5002]: I0930 12:38:30.761646 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/9504c15f-ff11-4255-9371-b0481f692c0b-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"9504c15f-ff11-4255-9371-b0481f692c0b\") " pod="openstack/cinder-scheduler-0" Sep 30 12:38:30 crc kubenswrapper[5002]: I0930 12:38:30.761682 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9504c15f-ff11-4255-9371-b0481f692c0b-scripts\") pod \"cinder-scheduler-0\" (UID: \"9504c15f-ff11-4255-9371-b0481f692c0b\") " pod="openstack/cinder-scheduler-0" Sep 30 12:38:30 crc kubenswrapper[5002]: I0930 12:38:30.764377 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/9504c15f-ff11-4255-9371-b0481f692c0b-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"9504c15f-ff11-4255-9371-b0481f692c0b\") " pod="openstack/cinder-scheduler-0" Sep 30 12:38:30 crc kubenswrapper[5002]: I0930 12:38:30.766671 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9504c15f-ff11-4255-9371-b0481f692c0b-scripts\") pod \"cinder-scheduler-0\" (UID: \"9504c15f-ff11-4255-9371-b0481f692c0b\") " pod="openstack/cinder-scheduler-0" Sep 30 12:38:30 crc kubenswrapper[5002]: I0930 12:38:30.766846 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: 
\"kubernetes.io/secret/9504c15f-ff11-4255-9371-b0481f692c0b-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"9504c15f-ff11-4255-9371-b0481f692c0b\") " pod="openstack/cinder-scheduler-0" Sep 30 12:38:30 crc kubenswrapper[5002]: I0930 12:38:30.768337 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9504c15f-ff11-4255-9371-b0481f692c0b-config-data\") pod \"cinder-scheduler-0\" (UID: \"9504c15f-ff11-4255-9371-b0481f692c0b\") " pod="openstack/cinder-scheduler-0" Sep 30 12:38:30 crc kubenswrapper[5002]: I0930 12:38:30.768588 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9504c15f-ff11-4255-9371-b0481f692c0b-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"9504c15f-ff11-4255-9371-b0481f692c0b\") " pod="openstack/cinder-scheduler-0" Sep 30 12:38:30 crc kubenswrapper[5002]: I0930 12:38:30.779900 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lvqfl\" (UniqueName: \"kubernetes.io/projected/9504c15f-ff11-4255-9371-b0481f692c0b-kube-api-access-lvqfl\") pod \"cinder-scheduler-0\" (UID: \"9504c15f-ff11-4255-9371-b0481f692c0b\") " pod="openstack/cinder-scheduler-0" Sep 30 12:38:30 crc kubenswrapper[5002]: I0930 12:38:30.967383 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Sep 30 12:38:31 crc kubenswrapper[5002]: I0930 12:38:31.312788 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ff36d6c9-849a-4c71-a8e2-8dbae8a96d3b","Type":"ContainerStarted","Data":"2c163d8cee1171098d2574112bb63467e416c54b37781a22a1a929656641d31f"} Sep 30 12:38:31 crc kubenswrapper[5002]: I0930 12:38:31.313092 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Sep 30 12:38:31 crc kubenswrapper[5002]: I0930 12:38:31.335811 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=1.943448318 podStartE2EDuration="5.335794105s" podCreationTimestamp="2025-09-30 12:38:26 +0000 UTC" firstStartedPulling="2025-09-30 12:38:27.49240045 +0000 UTC m=+1081.742082596" lastFinishedPulling="2025-09-30 12:38:30.884746237 +0000 UTC m=+1085.134428383" observedRunningTime="2025-09-30 12:38:31.331118747 +0000 UTC m=+1085.580800893" watchObservedRunningTime="2025-09-30 12:38:31.335794105 +0000 UTC m=+1085.585476251" Sep 30 12:38:31 crc kubenswrapper[5002]: I0930 12:38:31.474331 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Sep 30 12:38:32 crc kubenswrapper[5002]: I0930 12:38:32.099582 5002 patch_prober.go:28] interesting pod/machine-config-daemon-ncbb5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 12:38:32 crc kubenswrapper[5002]: I0930 12:38:32.099810 5002 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" podUID="341a55c6-78d3-4fa2-8f47-b56fd41fa1c1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 12:38:32 crc kubenswrapper[5002]: I0930 12:38:32.323136 5002 generic.go:334] "Generic (PLEG): container finished" 
podID="10afbe7c-91df-4843-96b2-ad180a2b9cd1" containerID="f93a8c90672f5396b7f993dea6d68a3053142b40633703b7b6d3a573be0d8c22" exitCode=0 Sep 30 12:38:32 crc kubenswrapper[5002]: I0930 12:38:32.323463 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-784489f99d-nqwcr" event={"ID":"10afbe7c-91df-4843-96b2-ad180a2b9cd1","Type":"ContainerDied","Data":"f93a8c90672f5396b7f993dea6d68a3053142b40633703b7b6d3a573be0d8c22"} Sep 30 12:38:32 crc kubenswrapper[5002]: I0930 12:38:32.325885 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"9504c15f-ff11-4255-9371-b0481f692c0b","Type":"ContainerStarted","Data":"dc5e77ecce58a9ef0c064ae8daac2a9a20df2a5f33e48bfa8507c3a1b9744b88"} Sep 30 12:38:32 crc kubenswrapper[5002]: I0930 12:38:32.325903 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"9504c15f-ff11-4255-9371-b0481f692c0b","Type":"ContainerStarted","Data":"bc812681af3fd971ab05b01c4d22f3f277ab94578104f63c04a0f1184db91d7f"} Sep 30 12:38:32 crc kubenswrapper[5002]: I0930 12:38:32.436350 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cinder-api-0" Sep 30 12:38:33 crc kubenswrapper[5002]: I0930 12:38:33.340798 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"9504c15f-ff11-4255-9371-b0481f692c0b","Type":"ContainerStarted","Data":"86823298f59041c8e96bf73735e5cc5b5cc9cb5fd9a3de8ed13f7bfba06b10eb"} Sep 30 12:38:34 crc kubenswrapper[5002]: I0930 12:38:34.450717 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Sep 30 12:38:34 crc kubenswrapper[5002]: I0930 12:38:34.450983 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Sep 30 12:38:34 crc kubenswrapper[5002]: I0930 12:38:34.492062 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Sep 30 12:38:34 crc kubenswrapper[5002]: I0930 12:38:34.507652 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Sep 30 12:38:34 crc kubenswrapper[5002]: I0930 12:38:34.527721 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=4.527702137 podStartE2EDuration="4.527702137s" podCreationTimestamp="2025-09-30 12:38:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 12:38:33.368910095 +0000 UTC m=+1087.618592261" watchObservedRunningTime="2025-09-30 12:38:34.527702137 +0000 UTC m=+1088.777384303" Sep 30 12:38:35 crc kubenswrapper[5002]: I0930 12:38:35.385980 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Sep 30 12:38:35 crc kubenswrapper[5002]: I0930 12:38:35.386417 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Sep 30 12:38:35 crc kubenswrapper[5002]: I0930 12:38:35.815018 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Sep 30 12:38:35 crc kubenswrapper[5002]: I0930 12:38:35.815894 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Sep 30 
12:38:35 crc kubenswrapper[5002]: I0930 12:38:35.855689 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Sep 30 12:38:35 crc kubenswrapper[5002]: I0930 12:38:35.863894 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Sep 30 12:38:35 crc kubenswrapper[5002]: I0930 12:38:35.967464 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0" Sep 30 12:38:36 crc kubenswrapper[5002]: I0930 12:38:36.399625 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Sep 30 12:38:36 crc kubenswrapper[5002]: I0930 12:38:36.399668 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Sep 30 12:38:37 crc kubenswrapper[5002]: I0930 12:38:37.131868 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Sep 30 12:38:37 crc kubenswrapper[5002]: I0930 12:38:37.395846 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-784489f99d-nqwcr" Sep 30 12:38:37 crc kubenswrapper[5002]: I0930 12:38:37.407597 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-784489f99d-nqwcr" Sep 30 12:38:37 crc kubenswrapper[5002]: I0930 12:38:37.407667 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-784489f99d-nqwcr" event={"ID":"10afbe7c-91df-4843-96b2-ad180a2b9cd1","Type":"ContainerDied","Data":"87f876174b9cfab3af426047fd90a521beea812e7ce8865cf310fecf7e2baaea"} Sep 30 12:38:37 crc kubenswrapper[5002]: I0930 12:38:37.407913 5002 scope.go:117] "RemoveContainer" containerID="6d26ffb59640b1d3e6ca14e606cdfaa97dc7ecba65a631be48df0d924d0febc3" Sep 30 12:38:37 crc kubenswrapper[5002]: I0930 12:38:37.408101 5002 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Sep 30 12:38:37 crc kubenswrapper[5002]: I0930 12:38:37.455001 5002 scope.go:117] "RemoveContainer" containerID="f93a8c90672f5396b7f993dea6d68a3053142b40633703b7b6d3a573be0d8c22" Sep 30 12:38:37 crc kubenswrapper[5002]: I0930 12:38:37.505588 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8dwlw\" (UniqueName: \"kubernetes.io/projected/10afbe7c-91df-4843-96b2-ad180a2b9cd1-kube-api-access-8dwlw\") pod \"10afbe7c-91df-4843-96b2-ad180a2b9cd1\" (UID: \"10afbe7c-91df-4843-96b2-ad180a2b9cd1\") " Sep 30 12:38:37 crc kubenswrapper[5002]: I0930 12:38:37.505654 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/10afbe7c-91df-4843-96b2-ad180a2b9cd1-ovndb-tls-certs\") pod \"10afbe7c-91df-4843-96b2-ad180a2b9cd1\" (UID: \"10afbe7c-91df-4843-96b2-ad180a2b9cd1\") " Sep 30 12:38:37 crc kubenswrapper[5002]: I0930 12:38:37.505697 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/10afbe7c-91df-4843-96b2-ad180a2b9cd1-httpd-config\") pod \"10afbe7c-91df-4843-96b2-ad180a2b9cd1\" (UID: \"10afbe7c-91df-4843-96b2-ad180a2b9cd1\") " Sep 30 12:38:37 crc kubenswrapper[5002]: I0930 12:38:37.505787 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/secret/10afbe7c-91df-4843-96b2-ad180a2b9cd1-config\") pod \"10afbe7c-91df-4843-96b2-ad180a2b9cd1\" (UID: \"10afbe7c-91df-4843-96b2-ad180a2b9cd1\") " Sep 30 12:38:37 crc kubenswrapper[5002]: I0930 12:38:37.505941 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/10afbe7c-91df-4843-96b2-ad180a2b9cd1-combined-ca-bundle\") pod \"10afbe7c-91df-4843-96b2-ad180a2b9cd1\" (UID: \"10afbe7c-91df-4843-96b2-ad180a2b9cd1\") " Sep 30 12:38:37 crc kubenswrapper[5002]: I0930 12:38:37.518950 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/10afbe7c-91df-4843-96b2-ad180a2b9cd1-kube-api-access-8dwlw" (OuterVolumeSpecName: "kube-api-access-8dwlw") pod "10afbe7c-91df-4843-96b2-ad180a2b9cd1" (UID: "10afbe7c-91df-4843-96b2-ad180a2b9cd1"). InnerVolumeSpecName "kube-api-access-8dwlw". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 12:38:37 crc kubenswrapper[5002]: I0930 12:38:37.531692 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/10afbe7c-91df-4843-96b2-ad180a2b9cd1-httpd-config" (OuterVolumeSpecName: "httpd-config") pod "10afbe7c-91df-4843-96b2-ad180a2b9cd1" (UID: "10afbe7c-91df-4843-96b2-ad180a2b9cd1"). InnerVolumeSpecName "httpd-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:38:37 crc kubenswrapper[5002]: I0930 12:38:37.608443 5002 reconciler_common.go:293] "Volume detached for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/10afbe7c-91df-4843-96b2-ad180a2b9cd1-httpd-config\") on node \"crc\" DevicePath \"\"" Sep 30 12:38:37 crc kubenswrapper[5002]: I0930 12:38:37.608484 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8dwlw\" (UniqueName: \"kubernetes.io/projected/10afbe7c-91df-4843-96b2-ad180a2b9cd1-kube-api-access-8dwlw\") on node \"crc\" DevicePath \"\"" Sep 30 12:38:37 crc kubenswrapper[5002]: I0930 12:38:37.608787 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/10afbe7c-91df-4843-96b2-ad180a2b9cd1-config" (OuterVolumeSpecName: "config") pod "10afbe7c-91df-4843-96b2-ad180a2b9cd1" (UID: "10afbe7c-91df-4843-96b2-ad180a2b9cd1"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:38:37 crc kubenswrapper[5002]: I0930 12:38:37.640099 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/10afbe7c-91df-4843-96b2-ad180a2b9cd1-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "10afbe7c-91df-4843-96b2-ad180a2b9cd1" (UID: "10afbe7c-91df-4843-96b2-ad180a2b9cd1"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:38:37 crc kubenswrapper[5002]: I0930 12:38:37.670631 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/10afbe7c-91df-4843-96b2-ad180a2b9cd1-ovndb-tls-certs" (OuterVolumeSpecName: "ovndb-tls-certs") pod "10afbe7c-91df-4843-96b2-ad180a2b9cd1" (UID: "10afbe7c-91df-4843-96b2-ad180a2b9cd1"). InnerVolumeSpecName "ovndb-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:38:37 crc kubenswrapper[5002]: I0930 12:38:37.710322 5002 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/10afbe7c-91df-4843-96b2-ad180a2b9cd1-config\") on node \"crc\" DevicePath \"\"" Sep 30 12:38:37 crc kubenswrapper[5002]: I0930 12:38:37.710370 5002 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/10afbe7c-91df-4843-96b2-ad180a2b9cd1-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 12:38:37 crc kubenswrapper[5002]: I0930 12:38:37.710381 5002 reconciler_common.go:293] "Volume detached for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/10afbe7c-91df-4843-96b2-ad180a2b9cd1-ovndb-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 30 12:38:37 crc kubenswrapper[5002]: I0930 12:38:37.737090 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Sep 30 12:38:37 crc kubenswrapper[5002]: I0930 12:38:37.757962 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-784489f99d-nqwcr"] Sep 30 12:38:37 crc kubenswrapper[5002]: I0930 12:38:37.764796 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-784489f99d-nqwcr"] Sep 30 12:38:38 crc kubenswrapper[5002]: I0930 12:38:38.418023 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-gjsbc" event={"ID":"7c7c28f5-a6b2-4841-ac23-37c9167b3da4","Type":"ContainerStarted","Data":"63f825fabef76f0e73f621d2a6fddaa3a422602c6bde058a11e8940acb387cd5"} Sep 30 12:38:38 crc kubenswrapper[5002]: I0930 12:38:38.420203 5002 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Sep 30 12:38:38 crc kubenswrapper[5002]: I0930 12:38:38.420234 5002 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Sep 30 12:38:38 crc kubenswrapper[5002]: I0930 12:38:38.422842 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Sep 30 12:38:38 crc kubenswrapper[5002]: I0930 12:38:38.441250 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-db-sync-gjsbc" podStartSLOduration=2.26397717 podStartE2EDuration="11.44123213s" podCreationTimestamp="2025-09-30 12:38:27 +0000 UTC" firstStartedPulling="2025-09-30 12:38:28.278078202 +0000 UTC m=+1082.527760348" lastFinishedPulling="2025-09-30 12:38:37.455333162 +0000 UTC m=+1091.705015308" observedRunningTime="2025-09-30 12:38:38.436607552 +0000 UTC m=+1092.686289718" watchObservedRunningTime="2025-09-30 12:38:38.44123213 +0000 UTC m=+1092.690914276" Sep 30 12:38:38 crc kubenswrapper[5002]: I0930 12:38:38.593432 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Sep 30 12:38:38 crc kubenswrapper[5002]: I0930 12:38:38.694390 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="10afbe7c-91df-4843-96b2-ad180a2b9cd1" path="/var/lib/kubelet/pods/10afbe7c-91df-4843-96b2-ad180a2b9cd1/volumes" Sep 30 12:38:41 crc kubenswrapper[5002]: I0930 12:38:41.197429 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0" Sep 30 12:38:41 crc kubenswrapper[5002]: I0930 12:38:41.806583 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Sep 30 12:38:41 crc 
kubenswrapper[5002]: I0930 12:38:41.806968 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="ff36d6c9-849a-4c71-a8e2-8dbae8a96d3b" containerName="ceilometer-central-agent" containerID="cri-o://5c58eed796ed91fc7a1b07b4b83224358586cebd93860f4563051790ae2a8231" gracePeriod=30 Sep 30 12:38:41 crc kubenswrapper[5002]: I0930 12:38:41.807090 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="ff36d6c9-849a-4c71-a8e2-8dbae8a96d3b" containerName="proxy-httpd" containerID="cri-o://2c163d8cee1171098d2574112bb63467e416c54b37781a22a1a929656641d31f" gracePeriod=30 Sep 30 12:38:41 crc kubenswrapper[5002]: I0930 12:38:41.807108 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="ff36d6c9-849a-4c71-a8e2-8dbae8a96d3b" containerName="sg-core" containerID="cri-o://7f7bd99857c3a50d34af2dc8a8459203aab82d232f1ec321254e7c7027126e95" gracePeriod=30 Sep 30 12:38:41 crc kubenswrapper[5002]: I0930 12:38:41.807258 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="ff36d6c9-849a-4c71-a8e2-8dbae8a96d3b" containerName="ceilometer-notification-agent" containerID="cri-o://727889ff5eb86ca44e5dd58da591ec66b48cb4ddbaafb7fb1642911f760ee5d7" gracePeriod=30 Sep 30 12:38:41 crc kubenswrapper[5002]: I0930 12:38:41.906425 5002 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ceilometer-0" podUID="ff36d6c9-849a-4c71-a8e2-8dbae8a96d3b" containerName="proxy-httpd" probeResult="failure" output="Get \"http://10.217.0.180:3000/\": read tcp 10.217.0.2:51844->10.217.0.180:3000: read: connection reset by peer" Sep 30 12:38:42 crc kubenswrapper[5002]: I0930 12:38:42.516130 5002 generic.go:334] "Generic (PLEG): container finished" podID="ff36d6c9-849a-4c71-a8e2-8dbae8a96d3b" containerID="2c163d8cee1171098d2574112bb63467e416c54b37781a22a1a929656641d31f" exitCode=0 Sep 30 12:38:42 crc kubenswrapper[5002]: I0930 12:38:42.517310 5002 generic.go:334] "Generic (PLEG): container finished" podID="ff36d6c9-849a-4c71-a8e2-8dbae8a96d3b" containerID="7f7bd99857c3a50d34af2dc8a8459203aab82d232f1ec321254e7c7027126e95" exitCode=2 Sep 30 12:38:42 crc kubenswrapper[5002]: I0930 12:38:42.517391 5002 generic.go:334] "Generic (PLEG): container finished" podID="ff36d6c9-849a-4c71-a8e2-8dbae8a96d3b" containerID="5c58eed796ed91fc7a1b07b4b83224358586cebd93860f4563051790ae2a8231" exitCode=0 Sep 30 12:38:42 crc kubenswrapper[5002]: I0930 12:38:42.516228 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ff36d6c9-849a-4c71-a8e2-8dbae8a96d3b","Type":"ContainerDied","Data":"2c163d8cee1171098d2574112bb63467e416c54b37781a22a1a929656641d31f"} Sep 30 12:38:42 crc kubenswrapper[5002]: I0930 12:38:42.517553 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ff36d6c9-849a-4c71-a8e2-8dbae8a96d3b","Type":"ContainerDied","Data":"7f7bd99857c3a50d34af2dc8a8459203aab82d232f1ec321254e7c7027126e95"} Sep 30 12:38:42 crc kubenswrapper[5002]: I0930 12:38:42.517638 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ff36d6c9-849a-4c71-a8e2-8dbae8a96d3b","Type":"ContainerDied","Data":"5c58eed796ed91fc7a1b07b4b83224358586cebd93860f4563051790ae2a8231"} Sep 30 12:38:44 crc kubenswrapper[5002]: I0930 12:38:44.316821 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Sep 30 12:38:44 crc kubenswrapper[5002]: I0930 12:38:44.460265 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/ff36d6c9-849a-4c71-a8e2-8dbae8a96d3b-sg-core-conf-yaml\") pod \"ff36d6c9-849a-4c71-a8e2-8dbae8a96d3b\" (UID: \"ff36d6c9-849a-4c71-a8e2-8dbae8a96d3b\") " Sep 30 12:38:44 crc kubenswrapper[5002]: I0930 12:38:44.460328 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ff36d6c9-849a-4c71-a8e2-8dbae8a96d3b-log-httpd\") pod \"ff36d6c9-849a-4c71-a8e2-8dbae8a96d3b\" (UID: \"ff36d6c9-849a-4c71-a8e2-8dbae8a96d3b\") " Sep 30 12:38:44 crc kubenswrapper[5002]: I0930 12:38:44.460370 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ff36d6c9-849a-4c71-a8e2-8dbae8a96d3b-run-httpd\") pod \"ff36d6c9-849a-4c71-a8e2-8dbae8a96d3b\" (UID: \"ff36d6c9-849a-4c71-a8e2-8dbae8a96d3b\") " Sep 30 12:38:44 crc kubenswrapper[5002]: I0930 12:38:44.460454 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ff36d6c9-849a-4c71-a8e2-8dbae8a96d3b-config-data\") pod \"ff36d6c9-849a-4c71-a8e2-8dbae8a96d3b\" (UID: \"ff36d6c9-849a-4c71-a8e2-8dbae8a96d3b\") " Sep 30 12:38:44 crc kubenswrapper[5002]: I0930 12:38:44.460542 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ff36d6c9-849a-4c71-a8e2-8dbae8a96d3b-scripts\") pod \"ff36d6c9-849a-4c71-a8e2-8dbae8a96d3b\" (UID: \"ff36d6c9-849a-4c71-a8e2-8dbae8a96d3b\") " Sep 30 12:38:44 crc kubenswrapper[5002]: I0930 12:38:44.460565 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ff36d6c9-849a-4c71-a8e2-8dbae8a96d3b-combined-ca-bundle\") pod \"ff36d6c9-849a-4c71-a8e2-8dbae8a96d3b\" (UID: \"ff36d6c9-849a-4c71-a8e2-8dbae8a96d3b\") " Sep 30 12:38:44 crc kubenswrapper[5002]: I0930 12:38:44.460592 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dtjlq\" (UniqueName: \"kubernetes.io/projected/ff36d6c9-849a-4c71-a8e2-8dbae8a96d3b-kube-api-access-dtjlq\") pod \"ff36d6c9-849a-4c71-a8e2-8dbae8a96d3b\" (UID: \"ff36d6c9-849a-4c71-a8e2-8dbae8a96d3b\") " Sep 30 12:38:44 crc kubenswrapper[5002]: I0930 12:38:44.461059 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ff36d6c9-849a-4c71-a8e2-8dbae8a96d3b-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "ff36d6c9-849a-4c71-a8e2-8dbae8a96d3b" (UID: "ff36d6c9-849a-4c71-a8e2-8dbae8a96d3b"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 12:38:44 crc kubenswrapper[5002]: I0930 12:38:44.462014 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ff36d6c9-849a-4c71-a8e2-8dbae8a96d3b-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "ff36d6c9-849a-4c71-a8e2-8dbae8a96d3b" (UID: "ff36d6c9-849a-4c71-a8e2-8dbae8a96d3b"). InnerVolumeSpecName "log-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 12:38:44 crc kubenswrapper[5002]: I0930 12:38:44.467627 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ff36d6c9-849a-4c71-a8e2-8dbae8a96d3b-kube-api-access-dtjlq" (OuterVolumeSpecName: "kube-api-access-dtjlq") pod "ff36d6c9-849a-4c71-a8e2-8dbae8a96d3b" (UID: "ff36d6c9-849a-4c71-a8e2-8dbae8a96d3b"). InnerVolumeSpecName "kube-api-access-dtjlq". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 12:38:44 crc kubenswrapper[5002]: I0930 12:38:44.478608 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ff36d6c9-849a-4c71-a8e2-8dbae8a96d3b-scripts" (OuterVolumeSpecName: "scripts") pod "ff36d6c9-849a-4c71-a8e2-8dbae8a96d3b" (UID: "ff36d6c9-849a-4c71-a8e2-8dbae8a96d3b"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:38:44 crc kubenswrapper[5002]: I0930 12:38:44.498659 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ff36d6c9-849a-4c71-a8e2-8dbae8a96d3b-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "ff36d6c9-849a-4c71-a8e2-8dbae8a96d3b" (UID: "ff36d6c9-849a-4c71-a8e2-8dbae8a96d3b"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:38:44 crc kubenswrapper[5002]: I0930 12:38:44.563048 5002 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/ff36d6c9-849a-4c71-a8e2-8dbae8a96d3b-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Sep 30 12:38:44 crc kubenswrapper[5002]: I0930 12:38:44.563082 5002 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ff36d6c9-849a-4c71-a8e2-8dbae8a96d3b-log-httpd\") on node \"crc\" DevicePath \"\"" Sep 30 12:38:44 crc kubenswrapper[5002]: I0930 12:38:44.563091 5002 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ff36d6c9-849a-4c71-a8e2-8dbae8a96d3b-run-httpd\") on node \"crc\" DevicePath \"\"" Sep 30 12:38:44 crc kubenswrapper[5002]: I0930 12:38:44.563100 5002 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ff36d6c9-849a-4c71-a8e2-8dbae8a96d3b-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 12:38:44 crc kubenswrapper[5002]: I0930 12:38:44.563112 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dtjlq\" (UniqueName: \"kubernetes.io/projected/ff36d6c9-849a-4c71-a8e2-8dbae8a96d3b-kube-api-access-dtjlq\") on node \"crc\" DevicePath \"\"" Sep 30 12:38:44 crc kubenswrapper[5002]: I0930 12:38:44.572555 5002 generic.go:334] "Generic (PLEG): container finished" podID="ff36d6c9-849a-4c71-a8e2-8dbae8a96d3b" containerID="727889ff5eb86ca44e5dd58da591ec66b48cb4ddbaafb7fb1642911f760ee5d7" exitCode=0 Sep 30 12:38:44 crc kubenswrapper[5002]: I0930 12:38:44.572605 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ff36d6c9-849a-4c71-a8e2-8dbae8a96d3b","Type":"ContainerDied","Data":"727889ff5eb86ca44e5dd58da591ec66b48cb4ddbaafb7fb1642911f760ee5d7"} Sep 30 12:38:44 crc kubenswrapper[5002]: I0930 12:38:44.572634 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ff36d6c9-849a-4c71-a8e2-8dbae8a96d3b","Type":"ContainerDied","Data":"7a9444c00bcd06f5df59413cbe726221761d4ea2790ed21d7ef39f27c14d65b7"} Sep 30 
12:38:44 crc kubenswrapper[5002]: I0930 12:38:44.572674 5002 scope.go:117] "RemoveContainer" containerID="2c163d8cee1171098d2574112bb63467e416c54b37781a22a1a929656641d31f" Sep 30 12:38:44 crc kubenswrapper[5002]: I0930 12:38:44.572899 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 30 12:38:44 crc kubenswrapper[5002]: I0930 12:38:44.588846 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ff36d6c9-849a-4c71-a8e2-8dbae8a96d3b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ff36d6c9-849a-4c71-a8e2-8dbae8a96d3b" (UID: "ff36d6c9-849a-4c71-a8e2-8dbae8a96d3b"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:38:44 crc kubenswrapper[5002]: I0930 12:38:44.597436 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ff36d6c9-849a-4c71-a8e2-8dbae8a96d3b-config-data" (OuterVolumeSpecName: "config-data") pod "ff36d6c9-849a-4c71-a8e2-8dbae8a96d3b" (UID: "ff36d6c9-849a-4c71-a8e2-8dbae8a96d3b"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:38:44 crc kubenswrapper[5002]: I0930 12:38:44.612616 5002 scope.go:117] "RemoveContainer" containerID="7f7bd99857c3a50d34af2dc8a8459203aab82d232f1ec321254e7c7027126e95" Sep 30 12:38:44 crc kubenswrapper[5002]: I0930 12:38:44.642032 5002 scope.go:117] "RemoveContainer" containerID="727889ff5eb86ca44e5dd58da591ec66b48cb4ddbaafb7fb1642911f760ee5d7" Sep 30 12:38:44 crc kubenswrapper[5002]: I0930 12:38:44.664916 5002 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ff36d6c9-849a-4c71-a8e2-8dbae8a96d3b-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 12:38:44 crc kubenswrapper[5002]: I0930 12:38:44.664954 5002 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ff36d6c9-849a-4c71-a8e2-8dbae8a96d3b-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 12:38:44 crc kubenswrapper[5002]: I0930 12:38:44.668340 5002 scope.go:117] "RemoveContainer" containerID="5c58eed796ed91fc7a1b07b4b83224358586cebd93860f4563051790ae2a8231" Sep 30 12:38:44 crc kubenswrapper[5002]: I0930 12:38:44.692041 5002 scope.go:117] "RemoveContainer" containerID="2c163d8cee1171098d2574112bb63467e416c54b37781a22a1a929656641d31f" Sep 30 12:38:44 crc kubenswrapper[5002]: E0930 12:38:44.692700 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2c163d8cee1171098d2574112bb63467e416c54b37781a22a1a929656641d31f\": container with ID starting with 2c163d8cee1171098d2574112bb63467e416c54b37781a22a1a929656641d31f not found: ID does not exist" containerID="2c163d8cee1171098d2574112bb63467e416c54b37781a22a1a929656641d31f" Sep 30 12:38:44 crc kubenswrapper[5002]: I0930 12:38:44.692744 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2c163d8cee1171098d2574112bb63467e416c54b37781a22a1a929656641d31f"} err="failed to get container status \"2c163d8cee1171098d2574112bb63467e416c54b37781a22a1a929656641d31f\": rpc error: code = NotFound desc = could not find container \"2c163d8cee1171098d2574112bb63467e416c54b37781a22a1a929656641d31f\": container with ID starting with 2c163d8cee1171098d2574112bb63467e416c54b37781a22a1a929656641d31f not found: ID does not exist" Sep 30 12:38:44 crc 
kubenswrapper[5002]: I0930 12:38:44.692772 5002 scope.go:117] "RemoveContainer" containerID="7f7bd99857c3a50d34af2dc8a8459203aab82d232f1ec321254e7c7027126e95" Sep 30 12:38:44 crc kubenswrapper[5002]: E0930 12:38:44.693261 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7f7bd99857c3a50d34af2dc8a8459203aab82d232f1ec321254e7c7027126e95\": container with ID starting with 7f7bd99857c3a50d34af2dc8a8459203aab82d232f1ec321254e7c7027126e95 not found: ID does not exist" containerID="7f7bd99857c3a50d34af2dc8a8459203aab82d232f1ec321254e7c7027126e95" Sep 30 12:38:44 crc kubenswrapper[5002]: I0930 12:38:44.693288 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7f7bd99857c3a50d34af2dc8a8459203aab82d232f1ec321254e7c7027126e95"} err="failed to get container status \"7f7bd99857c3a50d34af2dc8a8459203aab82d232f1ec321254e7c7027126e95\": rpc error: code = NotFound desc = could not find container \"7f7bd99857c3a50d34af2dc8a8459203aab82d232f1ec321254e7c7027126e95\": container with ID starting with 7f7bd99857c3a50d34af2dc8a8459203aab82d232f1ec321254e7c7027126e95 not found: ID does not exist" Sep 30 12:38:44 crc kubenswrapper[5002]: I0930 12:38:44.693306 5002 scope.go:117] "RemoveContainer" containerID="727889ff5eb86ca44e5dd58da591ec66b48cb4ddbaafb7fb1642911f760ee5d7" Sep 30 12:38:44 crc kubenswrapper[5002]: E0930 12:38:44.694915 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"727889ff5eb86ca44e5dd58da591ec66b48cb4ddbaafb7fb1642911f760ee5d7\": container with ID starting with 727889ff5eb86ca44e5dd58da591ec66b48cb4ddbaafb7fb1642911f760ee5d7 not found: ID does not exist" containerID="727889ff5eb86ca44e5dd58da591ec66b48cb4ddbaafb7fb1642911f760ee5d7" Sep 30 12:38:44 crc kubenswrapper[5002]: I0930 12:38:44.694960 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"727889ff5eb86ca44e5dd58da591ec66b48cb4ddbaafb7fb1642911f760ee5d7"} err="failed to get container status \"727889ff5eb86ca44e5dd58da591ec66b48cb4ddbaafb7fb1642911f760ee5d7\": rpc error: code = NotFound desc = could not find container \"727889ff5eb86ca44e5dd58da591ec66b48cb4ddbaafb7fb1642911f760ee5d7\": container with ID starting with 727889ff5eb86ca44e5dd58da591ec66b48cb4ddbaafb7fb1642911f760ee5d7 not found: ID does not exist" Sep 30 12:38:44 crc kubenswrapper[5002]: I0930 12:38:44.694993 5002 scope.go:117] "RemoveContainer" containerID="5c58eed796ed91fc7a1b07b4b83224358586cebd93860f4563051790ae2a8231" Sep 30 12:38:44 crc kubenswrapper[5002]: E0930 12:38:44.695264 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5c58eed796ed91fc7a1b07b4b83224358586cebd93860f4563051790ae2a8231\": container with ID starting with 5c58eed796ed91fc7a1b07b4b83224358586cebd93860f4563051790ae2a8231 not found: ID does not exist" containerID="5c58eed796ed91fc7a1b07b4b83224358586cebd93860f4563051790ae2a8231" Sep 30 12:38:44 crc kubenswrapper[5002]: I0930 12:38:44.695292 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5c58eed796ed91fc7a1b07b4b83224358586cebd93860f4563051790ae2a8231"} err="failed to get container status \"5c58eed796ed91fc7a1b07b4b83224358586cebd93860f4563051790ae2a8231\": rpc error: code = NotFound desc = could not find container 
\"5c58eed796ed91fc7a1b07b4b83224358586cebd93860f4563051790ae2a8231\": container with ID starting with 5c58eed796ed91fc7a1b07b4b83224358586cebd93860f4563051790ae2a8231 not found: ID does not exist" Sep 30 12:38:44 crc kubenswrapper[5002]: I0930 12:38:44.902901 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Sep 30 12:38:44 crc kubenswrapper[5002]: I0930 12:38:44.914329 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Sep 30 12:38:44 crc kubenswrapper[5002]: I0930 12:38:44.950603 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Sep 30 12:38:44 crc kubenswrapper[5002]: E0930 12:38:44.950925 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="10afbe7c-91df-4843-96b2-ad180a2b9cd1" containerName="neutron-httpd" Sep 30 12:38:44 crc kubenswrapper[5002]: I0930 12:38:44.950941 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="10afbe7c-91df-4843-96b2-ad180a2b9cd1" containerName="neutron-httpd" Sep 30 12:38:44 crc kubenswrapper[5002]: E0930 12:38:44.950953 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ff36d6c9-849a-4c71-a8e2-8dbae8a96d3b" containerName="ceilometer-notification-agent" Sep 30 12:38:44 crc kubenswrapper[5002]: I0930 12:38:44.950961 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="ff36d6c9-849a-4c71-a8e2-8dbae8a96d3b" containerName="ceilometer-notification-agent" Sep 30 12:38:44 crc kubenswrapper[5002]: E0930 12:38:44.950976 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ff36d6c9-849a-4c71-a8e2-8dbae8a96d3b" containerName="ceilometer-central-agent" Sep 30 12:38:44 crc kubenswrapper[5002]: I0930 12:38:44.950983 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="ff36d6c9-849a-4c71-a8e2-8dbae8a96d3b" containerName="ceilometer-central-agent" Sep 30 12:38:44 crc kubenswrapper[5002]: E0930 12:38:44.950995 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ff36d6c9-849a-4c71-a8e2-8dbae8a96d3b" containerName="sg-core" Sep 30 12:38:44 crc kubenswrapper[5002]: I0930 12:38:44.951002 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="ff36d6c9-849a-4c71-a8e2-8dbae8a96d3b" containerName="sg-core" Sep 30 12:38:44 crc kubenswrapper[5002]: E0930 12:38:44.951013 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ff36d6c9-849a-4c71-a8e2-8dbae8a96d3b" containerName="proxy-httpd" Sep 30 12:38:44 crc kubenswrapper[5002]: I0930 12:38:44.951019 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="ff36d6c9-849a-4c71-a8e2-8dbae8a96d3b" containerName="proxy-httpd" Sep 30 12:38:44 crc kubenswrapper[5002]: E0930 12:38:44.951030 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="10afbe7c-91df-4843-96b2-ad180a2b9cd1" containerName="neutron-api" Sep 30 12:38:44 crc kubenswrapper[5002]: I0930 12:38:44.951036 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="10afbe7c-91df-4843-96b2-ad180a2b9cd1" containerName="neutron-api" Sep 30 12:38:44 crc kubenswrapper[5002]: I0930 12:38:44.951204 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="ff36d6c9-849a-4c71-a8e2-8dbae8a96d3b" containerName="ceilometer-central-agent" Sep 30 12:38:44 crc kubenswrapper[5002]: I0930 12:38:44.951215 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="ff36d6c9-849a-4c71-a8e2-8dbae8a96d3b" containerName="ceilometer-notification-agent" Sep 30 12:38:44 crc kubenswrapper[5002]: I0930 12:38:44.951222 5002 memory_manager.go:354] 
"RemoveStaleState removing state" podUID="ff36d6c9-849a-4c71-a8e2-8dbae8a96d3b" containerName="proxy-httpd" Sep 30 12:38:44 crc kubenswrapper[5002]: I0930 12:38:44.951237 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="10afbe7c-91df-4843-96b2-ad180a2b9cd1" containerName="neutron-httpd" Sep 30 12:38:44 crc kubenswrapper[5002]: I0930 12:38:44.951250 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="ff36d6c9-849a-4c71-a8e2-8dbae8a96d3b" containerName="sg-core" Sep 30 12:38:44 crc kubenswrapper[5002]: I0930 12:38:44.951256 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="10afbe7c-91df-4843-96b2-ad180a2b9cd1" containerName="neutron-api" Sep 30 12:38:44 crc kubenswrapper[5002]: I0930 12:38:44.952714 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 30 12:38:44 crc kubenswrapper[5002]: I0930 12:38:44.956844 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Sep 30 12:38:44 crc kubenswrapper[5002]: I0930 12:38:44.956919 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Sep 30 12:38:44 crc kubenswrapper[5002]: I0930 12:38:44.964135 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 30 12:38:45 crc kubenswrapper[5002]: I0930 12:38:45.071394 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/41e74a80-7481-4f57-a850-4ec8869b051b-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"41e74a80-7481-4f57-a850-4ec8869b051b\") " pod="openstack/ceilometer-0" Sep 30 12:38:45 crc kubenswrapper[5002]: I0930 12:38:45.071458 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7v8z6\" (UniqueName: \"kubernetes.io/projected/41e74a80-7481-4f57-a850-4ec8869b051b-kube-api-access-7v8z6\") pod \"ceilometer-0\" (UID: \"41e74a80-7481-4f57-a850-4ec8869b051b\") " pod="openstack/ceilometer-0" Sep 30 12:38:45 crc kubenswrapper[5002]: I0930 12:38:45.071573 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/41e74a80-7481-4f57-a850-4ec8869b051b-scripts\") pod \"ceilometer-0\" (UID: \"41e74a80-7481-4f57-a850-4ec8869b051b\") " pod="openstack/ceilometer-0" Sep 30 12:38:45 crc kubenswrapper[5002]: I0930 12:38:45.071827 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/41e74a80-7481-4f57-a850-4ec8869b051b-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"41e74a80-7481-4f57-a850-4ec8869b051b\") " pod="openstack/ceilometer-0" Sep 30 12:38:45 crc kubenswrapper[5002]: I0930 12:38:45.071895 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/41e74a80-7481-4f57-a850-4ec8869b051b-log-httpd\") pod \"ceilometer-0\" (UID: \"41e74a80-7481-4f57-a850-4ec8869b051b\") " pod="openstack/ceilometer-0" Sep 30 12:38:45 crc kubenswrapper[5002]: I0930 12:38:45.072027 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/41e74a80-7481-4f57-a850-4ec8869b051b-run-httpd\") pod \"ceilometer-0\" (UID: 
\"41e74a80-7481-4f57-a850-4ec8869b051b\") " pod="openstack/ceilometer-0" Sep 30 12:38:45 crc kubenswrapper[5002]: I0930 12:38:45.072081 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/41e74a80-7481-4f57-a850-4ec8869b051b-config-data\") pod \"ceilometer-0\" (UID: \"41e74a80-7481-4f57-a850-4ec8869b051b\") " pod="openstack/ceilometer-0" Sep 30 12:38:45 crc kubenswrapper[5002]: I0930 12:38:45.158130 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-6f8969955b-64x4f" Sep 30 12:38:45 crc kubenswrapper[5002]: I0930 12:38:45.161181 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-6f8969955b-64x4f" Sep 30 12:38:45 crc kubenswrapper[5002]: I0930 12:38:45.173418 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/41e74a80-7481-4f57-a850-4ec8869b051b-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"41e74a80-7481-4f57-a850-4ec8869b051b\") " pod="openstack/ceilometer-0" Sep 30 12:38:45 crc kubenswrapper[5002]: I0930 12:38:45.173483 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/41e74a80-7481-4f57-a850-4ec8869b051b-log-httpd\") pod \"ceilometer-0\" (UID: \"41e74a80-7481-4f57-a850-4ec8869b051b\") " pod="openstack/ceilometer-0" Sep 30 12:38:45 crc kubenswrapper[5002]: I0930 12:38:45.173535 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/41e74a80-7481-4f57-a850-4ec8869b051b-run-httpd\") pod \"ceilometer-0\" (UID: \"41e74a80-7481-4f57-a850-4ec8869b051b\") " pod="openstack/ceilometer-0" Sep 30 12:38:45 crc kubenswrapper[5002]: I0930 12:38:45.173563 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/41e74a80-7481-4f57-a850-4ec8869b051b-config-data\") pod \"ceilometer-0\" (UID: \"41e74a80-7481-4f57-a850-4ec8869b051b\") " pod="openstack/ceilometer-0" Sep 30 12:38:45 crc kubenswrapper[5002]: I0930 12:38:45.173595 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/41e74a80-7481-4f57-a850-4ec8869b051b-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"41e74a80-7481-4f57-a850-4ec8869b051b\") " pod="openstack/ceilometer-0" Sep 30 12:38:45 crc kubenswrapper[5002]: I0930 12:38:45.173624 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7v8z6\" (UniqueName: \"kubernetes.io/projected/41e74a80-7481-4f57-a850-4ec8869b051b-kube-api-access-7v8z6\") pod \"ceilometer-0\" (UID: \"41e74a80-7481-4f57-a850-4ec8869b051b\") " pod="openstack/ceilometer-0" Sep 30 12:38:45 crc kubenswrapper[5002]: I0930 12:38:45.173652 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/41e74a80-7481-4f57-a850-4ec8869b051b-scripts\") pod \"ceilometer-0\" (UID: \"41e74a80-7481-4f57-a850-4ec8869b051b\") " pod="openstack/ceilometer-0" Sep 30 12:38:45 crc kubenswrapper[5002]: I0930 12:38:45.174043 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/41e74a80-7481-4f57-a850-4ec8869b051b-log-httpd\") pod \"ceilometer-0\" (UID: 
\"41e74a80-7481-4f57-a850-4ec8869b051b\") " pod="openstack/ceilometer-0" Sep 30 12:38:45 crc kubenswrapper[5002]: I0930 12:38:45.174101 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/41e74a80-7481-4f57-a850-4ec8869b051b-run-httpd\") pod \"ceilometer-0\" (UID: \"41e74a80-7481-4f57-a850-4ec8869b051b\") " pod="openstack/ceilometer-0" Sep 30 12:38:45 crc kubenswrapper[5002]: I0930 12:38:45.178465 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/41e74a80-7481-4f57-a850-4ec8869b051b-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"41e74a80-7481-4f57-a850-4ec8869b051b\") " pod="openstack/ceilometer-0" Sep 30 12:38:45 crc kubenswrapper[5002]: I0930 12:38:45.178587 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/41e74a80-7481-4f57-a850-4ec8869b051b-scripts\") pod \"ceilometer-0\" (UID: \"41e74a80-7481-4f57-a850-4ec8869b051b\") " pod="openstack/ceilometer-0" Sep 30 12:38:45 crc kubenswrapper[5002]: I0930 12:38:45.180972 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/41e74a80-7481-4f57-a850-4ec8869b051b-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"41e74a80-7481-4f57-a850-4ec8869b051b\") " pod="openstack/ceilometer-0" Sep 30 12:38:45 crc kubenswrapper[5002]: I0930 12:38:45.181765 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/41e74a80-7481-4f57-a850-4ec8869b051b-config-data\") pod \"ceilometer-0\" (UID: \"41e74a80-7481-4f57-a850-4ec8869b051b\") " pod="openstack/ceilometer-0" Sep 30 12:38:45 crc kubenswrapper[5002]: I0930 12:38:45.201368 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7v8z6\" (UniqueName: \"kubernetes.io/projected/41e74a80-7481-4f57-a850-4ec8869b051b-kube-api-access-7v8z6\") pod \"ceilometer-0\" (UID: \"41e74a80-7481-4f57-a850-4ec8869b051b\") " pod="openstack/ceilometer-0" Sep 30 12:38:45 crc kubenswrapper[5002]: I0930 12:38:45.301744 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Sep 30 12:38:45 crc kubenswrapper[5002]: I0930 12:38:45.808791 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 30 12:38:45 crc kubenswrapper[5002]: W0930 12:38:45.812641 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod41e74a80_7481_4f57_a850_4ec8869b051b.slice/crio-774cf643b3032390b0cb9d20fedbfcf43d9f5582a3bb4356e3e95d263c6e0a7b WatchSource:0}: Error finding container 774cf643b3032390b0cb9d20fedbfcf43d9f5582a3bb4356e3e95d263c6e0a7b: Status 404 returned error can't find the container with id 774cf643b3032390b0cb9d20fedbfcf43d9f5582a3bb4356e3e95d263c6e0a7b Sep 30 12:38:46 crc kubenswrapper[5002]: I0930 12:38:46.610390 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"41e74a80-7481-4f57-a850-4ec8869b051b","Type":"ContainerStarted","Data":"10d4e31d5501f0c51057168196c67336f97b4a5261efb2b0a45e4a69efd2e5f2"} Sep 30 12:38:46 crc kubenswrapper[5002]: I0930 12:38:46.610640 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"41e74a80-7481-4f57-a850-4ec8869b051b","Type":"ContainerStarted","Data":"774cf643b3032390b0cb9d20fedbfcf43d9f5582a3bb4356e3e95d263c6e0a7b"} Sep 30 12:38:46 crc kubenswrapper[5002]: I0930 12:38:46.691129 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ff36d6c9-849a-4c71-a8e2-8dbae8a96d3b" path="/var/lib/kubelet/pods/ff36d6c9-849a-4c71-a8e2-8dbae8a96d3b/volumes" Sep 30 12:38:47 crc kubenswrapper[5002]: I0930 12:38:47.627314 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"41e74a80-7481-4f57-a850-4ec8869b051b","Type":"ContainerStarted","Data":"7cbec1b5a6de0c9ec3905aa36e7aa394ce1e87f2c0e245bc9ad3cbc819aa98c2"} Sep 30 12:38:48 crc kubenswrapper[5002]: I0930 12:38:48.638437 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"41e74a80-7481-4f57-a850-4ec8869b051b","Type":"ContainerStarted","Data":"1f8a359691d288f18fa9853d1752e24eefbc6a9bf7fa0dc0b6eb200dded6f486"} Sep 30 12:38:50 crc kubenswrapper[5002]: I0930 12:38:50.665958 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"41e74a80-7481-4f57-a850-4ec8869b051b","Type":"ContainerStarted","Data":"9a7a7bd43ec94f061b74be5256ae7d5f73436f3afbe2eb25a0adc0818e90f18d"} Sep 30 12:38:50 crc kubenswrapper[5002]: I0930 12:38:50.666579 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Sep 30 12:38:50 crc kubenswrapper[5002]: I0930 12:38:50.703973 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.6756929830000002 podStartE2EDuration="6.703939063s" podCreationTimestamp="2025-09-30 12:38:44 +0000 UTC" firstStartedPulling="2025-09-30 12:38:45.814944319 +0000 UTC m=+1100.064626465" lastFinishedPulling="2025-09-30 12:38:49.843190389 +0000 UTC m=+1104.092872545" observedRunningTime="2025-09-30 12:38:50.691762476 +0000 UTC m=+1104.941444652" watchObservedRunningTime="2025-09-30 12:38:50.703939063 +0000 UTC m=+1104.953621249" Sep 30 12:38:52 crc kubenswrapper[5002]: I0930 12:38:52.705321 5002 generic.go:334] "Generic (PLEG): container finished" podID="7c7c28f5-a6b2-4841-ac23-37c9167b3da4" containerID="63f825fabef76f0e73f621d2a6fddaa3a422602c6bde058a11e8940acb387cd5" exitCode=0 Sep 30 12:38:52 crc 
kubenswrapper[5002]: I0930 12:38:52.711836 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-gjsbc" event={"ID":"7c7c28f5-a6b2-4841-ac23-37c9167b3da4","Type":"ContainerDied","Data":"63f825fabef76f0e73f621d2a6fddaa3a422602c6bde058a11e8940acb387cd5"} Sep 30 12:38:54 crc kubenswrapper[5002]: I0930 12:38:54.087571 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-gjsbc" Sep 30 12:38:54 crc kubenswrapper[5002]: I0930 12:38:54.158077 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7c7c28f5-a6b2-4841-ac23-37c9167b3da4-scripts\") pod \"7c7c28f5-a6b2-4841-ac23-37c9167b3da4\" (UID: \"7c7c28f5-a6b2-4841-ac23-37c9167b3da4\") " Sep 30 12:38:54 crc kubenswrapper[5002]: I0930 12:38:54.158381 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7c7c28f5-a6b2-4841-ac23-37c9167b3da4-combined-ca-bundle\") pod \"7c7c28f5-a6b2-4841-ac23-37c9167b3da4\" (UID: \"7c7c28f5-a6b2-4841-ac23-37c9167b3da4\") " Sep 30 12:38:54 crc kubenswrapper[5002]: I0930 12:38:54.158498 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ndpxv\" (UniqueName: \"kubernetes.io/projected/7c7c28f5-a6b2-4841-ac23-37c9167b3da4-kube-api-access-ndpxv\") pod \"7c7c28f5-a6b2-4841-ac23-37c9167b3da4\" (UID: \"7c7c28f5-a6b2-4841-ac23-37c9167b3da4\") " Sep 30 12:38:54 crc kubenswrapper[5002]: I0930 12:38:54.158594 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7c7c28f5-a6b2-4841-ac23-37c9167b3da4-config-data\") pod \"7c7c28f5-a6b2-4841-ac23-37c9167b3da4\" (UID: \"7c7c28f5-a6b2-4841-ac23-37c9167b3da4\") " Sep 30 12:38:54 crc kubenswrapper[5002]: I0930 12:38:54.164549 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7c7c28f5-a6b2-4841-ac23-37c9167b3da4-scripts" (OuterVolumeSpecName: "scripts") pod "7c7c28f5-a6b2-4841-ac23-37c9167b3da4" (UID: "7c7c28f5-a6b2-4841-ac23-37c9167b3da4"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:38:54 crc kubenswrapper[5002]: I0930 12:38:54.170644 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7c7c28f5-a6b2-4841-ac23-37c9167b3da4-kube-api-access-ndpxv" (OuterVolumeSpecName: "kube-api-access-ndpxv") pod "7c7c28f5-a6b2-4841-ac23-37c9167b3da4" (UID: "7c7c28f5-a6b2-4841-ac23-37c9167b3da4"). InnerVolumeSpecName "kube-api-access-ndpxv". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 12:38:54 crc kubenswrapper[5002]: I0930 12:38:54.188644 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7c7c28f5-a6b2-4841-ac23-37c9167b3da4-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7c7c28f5-a6b2-4841-ac23-37c9167b3da4" (UID: "7c7c28f5-a6b2-4841-ac23-37c9167b3da4"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:38:54 crc kubenswrapper[5002]: I0930 12:38:54.193172 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7c7c28f5-a6b2-4841-ac23-37c9167b3da4-config-data" (OuterVolumeSpecName: "config-data") pod "7c7c28f5-a6b2-4841-ac23-37c9167b3da4" (UID: "7c7c28f5-a6b2-4841-ac23-37c9167b3da4"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:38:54 crc kubenswrapper[5002]: I0930 12:38:54.261179 5002 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7c7c28f5-a6b2-4841-ac23-37c9167b3da4-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 12:38:54 crc kubenswrapper[5002]: I0930 12:38:54.261383 5002 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7c7c28f5-a6b2-4841-ac23-37c9167b3da4-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 12:38:54 crc kubenswrapper[5002]: I0930 12:38:54.261395 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ndpxv\" (UniqueName: \"kubernetes.io/projected/7c7c28f5-a6b2-4841-ac23-37c9167b3da4-kube-api-access-ndpxv\") on node \"crc\" DevicePath \"\"" Sep 30 12:38:54 crc kubenswrapper[5002]: I0930 12:38:54.261405 5002 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7c7c28f5-a6b2-4841-ac23-37c9167b3da4-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 12:38:54 crc kubenswrapper[5002]: I0930 12:38:54.726744 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-gjsbc" event={"ID":"7c7c28f5-a6b2-4841-ac23-37c9167b3da4","Type":"ContainerDied","Data":"157ae5e9fea6a59a9b03f7ef98c16ae08e88f5c31ddcc8c3fff0e78fe4291b1e"} Sep 30 12:38:54 crc kubenswrapper[5002]: I0930 12:38:54.726782 5002 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="157ae5e9fea6a59a9b03f7ef98c16ae08e88f5c31ddcc8c3fff0e78fe4291b1e" Sep 30 12:38:54 crc kubenswrapper[5002]: I0930 12:38:54.726799 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-gjsbc" Sep 30 12:38:54 crc kubenswrapper[5002]: I0930 12:38:54.822052 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-0"] Sep 30 12:38:54 crc kubenswrapper[5002]: E0930 12:38:54.822511 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7c7c28f5-a6b2-4841-ac23-37c9167b3da4" containerName="nova-cell0-conductor-db-sync" Sep 30 12:38:54 crc kubenswrapper[5002]: I0930 12:38:54.822528 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="7c7c28f5-a6b2-4841-ac23-37c9167b3da4" containerName="nova-cell0-conductor-db-sync" Sep 30 12:38:54 crc kubenswrapper[5002]: I0930 12:38:54.822804 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="7c7c28f5-a6b2-4841-ac23-37c9167b3da4" containerName="nova-cell0-conductor-db-sync" Sep 30 12:38:54 crc kubenswrapper[5002]: I0930 12:38:54.823561 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-0" Sep 30 12:38:54 crc kubenswrapper[5002]: I0930 12:38:54.826070 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Sep 30 12:38:54 crc kubenswrapper[5002]: I0930 12:38:54.826070 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-tdv86" Sep 30 12:38:54 crc kubenswrapper[5002]: I0930 12:38:54.843323 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Sep 30 12:38:54 crc kubenswrapper[5002]: I0930 12:38:54.972671 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/37740f37-5646-4a17-9356-12430b5fc45f-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"37740f37-5646-4a17-9356-12430b5fc45f\") " pod="openstack/nova-cell0-conductor-0" Sep 30 12:38:54 crc kubenswrapper[5002]: I0930 12:38:54.972793 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/37740f37-5646-4a17-9356-12430b5fc45f-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"37740f37-5646-4a17-9356-12430b5fc45f\") " pod="openstack/nova-cell0-conductor-0" Sep 30 12:38:54 crc kubenswrapper[5002]: I0930 12:38:54.972889 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hd7kj\" (UniqueName: \"kubernetes.io/projected/37740f37-5646-4a17-9356-12430b5fc45f-kube-api-access-hd7kj\") pod \"nova-cell0-conductor-0\" (UID: \"37740f37-5646-4a17-9356-12430b5fc45f\") " pod="openstack/nova-cell0-conductor-0" Sep 30 12:38:55 crc kubenswrapper[5002]: I0930 12:38:55.074148 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/37740f37-5646-4a17-9356-12430b5fc45f-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"37740f37-5646-4a17-9356-12430b5fc45f\") " pod="openstack/nova-cell0-conductor-0" Sep 30 12:38:55 crc kubenswrapper[5002]: I0930 12:38:55.074246 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hd7kj\" (UniqueName: \"kubernetes.io/projected/37740f37-5646-4a17-9356-12430b5fc45f-kube-api-access-hd7kj\") pod \"nova-cell0-conductor-0\" (UID: \"37740f37-5646-4a17-9356-12430b5fc45f\") " pod="openstack/nova-cell0-conductor-0" Sep 30 12:38:55 crc kubenswrapper[5002]: I0930 12:38:55.074291 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/37740f37-5646-4a17-9356-12430b5fc45f-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"37740f37-5646-4a17-9356-12430b5fc45f\") " pod="openstack/nova-cell0-conductor-0" Sep 30 12:38:55 crc kubenswrapper[5002]: I0930 12:38:55.078404 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/37740f37-5646-4a17-9356-12430b5fc45f-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"37740f37-5646-4a17-9356-12430b5fc45f\") " pod="openstack/nova-cell0-conductor-0" Sep 30 12:38:55 crc kubenswrapper[5002]: I0930 12:38:55.079966 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/37740f37-5646-4a17-9356-12430b5fc45f-config-data\") pod \"nova-cell0-conductor-0\" 
(UID: \"37740f37-5646-4a17-9356-12430b5fc45f\") " pod="openstack/nova-cell0-conductor-0" Sep 30 12:38:55 crc kubenswrapper[5002]: I0930 12:38:55.096546 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hd7kj\" (UniqueName: \"kubernetes.io/projected/37740f37-5646-4a17-9356-12430b5fc45f-kube-api-access-hd7kj\") pod \"nova-cell0-conductor-0\" (UID: \"37740f37-5646-4a17-9356-12430b5fc45f\") " pod="openstack/nova-cell0-conductor-0" Sep 30 12:38:55 crc kubenswrapper[5002]: I0930 12:38:55.140312 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Sep 30 12:38:55 crc kubenswrapper[5002]: I0930 12:38:55.695698 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Sep 30 12:38:55 crc kubenswrapper[5002]: I0930 12:38:55.743020 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"37740f37-5646-4a17-9356-12430b5fc45f","Type":"ContainerStarted","Data":"ee17b08681d8a883890e89f18dee50767abc9de4961f0b7ba8fe32a87902f73a"} Sep 30 12:38:56 crc kubenswrapper[5002]: I0930 12:38:56.758115 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"37740f37-5646-4a17-9356-12430b5fc45f","Type":"ContainerStarted","Data":"65179b3898ef52603d11103e6ea6e668fd3a8c59ca34a54aa70d9f60dfd06915"} Sep 30 12:38:56 crc kubenswrapper[5002]: I0930 12:38:56.759162 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell0-conductor-0" Sep 30 12:38:56 crc kubenswrapper[5002]: I0930 12:38:56.792859 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-0" podStartSLOduration=2.792838043 podStartE2EDuration="2.792838043s" podCreationTimestamp="2025-09-30 12:38:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 12:38:56.785183791 +0000 UTC m=+1111.034865937" watchObservedRunningTime="2025-09-30 12:38:56.792838043 +0000 UTC m=+1111.042520189" Sep 30 12:38:56 crc kubenswrapper[5002]: I0930 12:38:56.876057 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-0"] Sep 30 12:38:58 crc kubenswrapper[5002]: I0930 12:38:58.528643 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Sep 30 12:38:58 crc kubenswrapper[5002]: I0930 12:38:58.529585 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="41e74a80-7481-4f57-a850-4ec8869b051b" containerName="ceilometer-central-agent" containerID="cri-o://10d4e31d5501f0c51057168196c67336f97b4a5261efb2b0a45e4a69efd2e5f2" gracePeriod=30 Sep 30 12:38:58 crc kubenswrapper[5002]: I0930 12:38:58.529639 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="41e74a80-7481-4f57-a850-4ec8869b051b" containerName="ceilometer-notification-agent" containerID="cri-o://7cbec1b5a6de0c9ec3905aa36e7aa394ce1e87f2c0e245bc9ad3cbc819aa98c2" gracePeriod=30 Sep 30 12:38:58 crc kubenswrapper[5002]: I0930 12:38:58.529645 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="41e74a80-7481-4f57-a850-4ec8869b051b" containerName="sg-core" containerID="cri-o://1f8a359691d288f18fa9853d1752e24eefbc6a9bf7fa0dc0b6eb200dded6f486" gracePeriod=30 Sep 30 12:38:58 crc 
kubenswrapper[5002]: I0930 12:38:58.529597 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="41e74a80-7481-4f57-a850-4ec8869b051b" containerName="proxy-httpd" containerID="cri-o://9a7a7bd43ec94f061b74be5256ae7d5f73436f3afbe2eb25a0adc0818e90f18d" gracePeriod=30 Sep 30 12:38:58 crc kubenswrapper[5002]: I0930 12:38:58.777430 5002 generic.go:334] "Generic (PLEG): container finished" podID="41e74a80-7481-4f57-a850-4ec8869b051b" containerID="9a7a7bd43ec94f061b74be5256ae7d5f73436f3afbe2eb25a0adc0818e90f18d" exitCode=0 Sep 30 12:38:58 crc kubenswrapper[5002]: I0930 12:38:58.777461 5002 generic.go:334] "Generic (PLEG): container finished" podID="41e74a80-7481-4f57-a850-4ec8869b051b" containerID="1f8a359691d288f18fa9853d1752e24eefbc6a9bf7fa0dc0b6eb200dded6f486" exitCode=2 Sep 30 12:38:58 crc kubenswrapper[5002]: I0930 12:38:58.777520 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"41e74a80-7481-4f57-a850-4ec8869b051b","Type":"ContainerDied","Data":"9a7a7bd43ec94f061b74be5256ae7d5f73436f3afbe2eb25a0adc0818e90f18d"} Sep 30 12:38:58 crc kubenswrapper[5002]: I0930 12:38:58.777558 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"41e74a80-7481-4f57-a850-4ec8869b051b","Type":"ContainerDied","Data":"1f8a359691d288f18fa9853d1752e24eefbc6a9bf7fa0dc0b6eb200dded6f486"} Sep 30 12:38:58 crc kubenswrapper[5002]: I0930 12:38:58.777630 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell0-conductor-0" podUID="37740f37-5646-4a17-9356-12430b5fc45f" containerName="nova-cell0-conductor-conductor" containerID="cri-o://65179b3898ef52603d11103e6ea6e668fd3a8c59ca34a54aa70d9f60dfd06915" gracePeriod=30 Sep 30 12:38:59 crc kubenswrapper[5002]: I0930 12:38:59.801076 5002 generic.go:334] "Generic (PLEG): container finished" podID="37740f37-5646-4a17-9356-12430b5fc45f" containerID="65179b3898ef52603d11103e6ea6e668fd3a8c59ca34a54aa70d9f60dfd06915" exitCode=0 Sep 30 12:38:59 crc kubenswrapper[5002]: I0930 12:38:59.801619 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"37740f37-5646-4a17-9356-12430b5fc45f","Type":"ContainerDied","Data":"65179b3898ef52603d11103e6ea6e668fd3a8c59ca34a54aa70d9f60dfd06915"} Sep 30 12:38:59 crc kubenswrapper[5002]: I0930 12:38:59.805161 5002 generic.go:334] "Generic (PLEG): container finished" podID="41e74a80-7481-4f57-a850-4ec8869b051b" containerID="7cbec1b5a6de0c9ec3905aa36e7aa394ce1e87f2c0e245bc9ad3cbc819aa98c2" exitCode=0 Sep 30 12:38:59 crc kubenswrapper[5002]: I0930 12:38:59.805202 5002 generic.go:334] "Generic (PLEG): container finished" podID="41e74a80-7481-4f57-a850-4ec8869b051b" containerID="10d4e31d5501f0c51057168196c67336f97b4a5261efb2b0a45e4a69efd2e5f2" exitCode=0 Sep 30 12:38:59 crc kubenswrapper[5002]: I0930 12:38:59.805229 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"41e74a80-7481-4f57-a850-4ec8869b051b","Type":"ContainerDied","Data":"7cbec1b5a6de0c9ec3905aa36e7aa394ce1e87f2c0e245bc9ad3cbc819aa98c2"} Sep 30 12:38:59 crc kubenswrapper[5002]: I0930 12:38:59.805264 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"41e74a80-7481-4f57-a850-4ec8869b051b","Type":"ContainerDied","Data":"10d4e31d5501f0c51057168196c67336f97b4a5261efb2b0a45e4a69efd2e5f2"} Sep 30 12:39:00 crc kubenswrapper[5002]: I0930 12:39:00.029046 5002 util.go:48] "No ready 
sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Sep 30 12:39:00 crc kubenswrapper[5002]: I0930 12:39:00.035632 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 30 12:39:00 crc kubenswrapper[5002]: I0930 12:39:00.164336 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/41e74a80-7481-4f57-a850-4ec8869b051b-combined-ca-bundle\") pod \"41e74a80-7481-4f57-a850-4ec8869b051b\" (UID: \"41e74a80-7481-4f57-a850-4ec8869b051b\") " Sep 30 12:39:00 crc kubenswrapper[5002]: I0930 12:39:00.164554 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/41e74a80-7481-4f57-a850-4ec8869b051b-run-httpd\") pod \"41e74a80-7481-4f57-a850-4ec8869b051b\" (UID: \"41e74a80-7481-4f57-a850-4ec8869b051b\") " Sep 30 12:39:00 crc kubenswrapper[5002]: I0930 12:39:00.164654 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/41e74a80-7481-4f57-a850-4ec8869b051b-log-httpd\") pod \"41e74a80-7481-4f57-a850-4ec8869b051b\" (UID: \"41e74a80-7481-4f57-a850-4ec8869b051b\") " Sep 30 12:39:00 crc kubenswrapper[5002]: I0930 12:39:00.164763 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/37740f37-5646-4a17-9356-12430b5fc45f-combined-ca-bundle\") pod \"37740f37-5646-4a17-9356-12430b5fc45f\" (UID: \"37740f37-5646-4a17-9356-12430b5fc45f\") " Sep 30 12:39:00 crc kubenswrapper[5002]: I0930 12:39:00.164833 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/37740f37-5646-4a17-9356-12430b5fc45f-config-data\") pod \"37740f37-5646-4a17-9356-12430b5fc45f\" (UID: \"37740f37-5646-4a17-9356-12430b5fc45f\") " Sep 30 12:39:00 crc kubenswrapper[5002]: I0930 12:39:00.165059 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/41e74a80-7481-4f57-a850-4ec8869b051b-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "41e74a80-7481-4f57-a850-4ec8869b051b" (UID: "41e74a80-7481-4f57-a850-4ec8869b051b"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 12:39:00 crc kubenswrapper[5002]: I0930 12:39:00.165187 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/41e74a80-7481-4f57-a850-4ec8869b051b-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "41e74a80-7481-4f57-a850-4ec8869b051b" (UID: "41e74a80-7481-4f57-a850-4ec8869b051b"). InnerVolumeSpecName "log-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 12:39:00 crc kubenswrapper[5002]: I0930 12:39:00.165784 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/41e74a80-7481-4f57-a850-4ec8869b051b-sg-core-conf-yaml\") pod \"41e74a80-7481-4f57-a850-4ec8869b051b\" (UID: \"41e74a80-7481-4f57-a850-4ec8869b051b\") " Sep 30 12:39:00 crc kubenswrapper[5002]: I0930 12:39:00.165914 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7v8z6\" (UniqueName: \"kubernetes.io/projected/41e74a80-7481-4f57-a850-4ec8869b051b-kube-api-access-7v8z6\") pod \"41e74a80-7481-4f57-a850-4ec8869b051b\" (UID: \"41e74a80-7481-4f57-a850-4ec8869b051b\") " Sep 30 12:39:00 crc kubenswrapper[5002]: I0930 12:39:00.166029 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hd7kj\" (UniqueName: \"kubernetes.io/projected/37740f37-5646-4a17-9356-12430b5fc45f-kube-api-access-hd7kj\") pod \"37740f37-5646-4a17-9356-12430b5fc45f\" (UID: \"37740f37-5646-4a17-9356-12430b5fc45f\") " Sep 30 12:39:00 crc kubenswrapper[5002]: I0930 12:39:00.166120 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/41e74a80-7481-4f57-a850-4ec8869b051b-scripts\") pod \"41e74a80-7481-4f57-a850-4ec8869b051b\" (UID: \"41e74a80-7481-4f57-a850-4ec8869b051b\") " Sep 30 12:39:00 crc kubenswrapper[5002]: I0930 12:39:00.166182 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/41e74a80-7481-4f57-a850-4ec8869b051b-config-data\") pod \"41e74a80-7481-4f57-a850-4ec8869b051b\" (UID: \"41e74a80-7481-4f57-a850-4ec8869b051b\") " Sep 30 12:39:00 crc kubenswrapper[5002]: I0930 12:39:00.167087 5002 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/41e74a80-7481-4f57-a850-4ec8869b051b-run-httpd\") on node \"crc\" DevicePath \"\"" Sep 30 12:39:00 crc kubenswrapper[5002]: I0930 12:39:00.167128 5002 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/41e74a80-7481-4f57-a850-4ec8869b051b-log-httpd\") on node \"crc\" DevicePath \"\"" Sep 30 12:39:00 crc kubenswrapper[5002]: I0930 12:39:00.170844 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/37740f37-5646-4a17-9356-12430b5fc45f-kube-api-access-hd7kj" (OuterVolumeSpecName: "kube-api-access-hd7kj") pod "37740f37-5646-4a17-9356-12430b5fc45f" (UID: "37740f37-5646-4a17-9356-12430b5fc45f"). InnerVolumeSpecName "kube-api-access-hd7kj". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 12:39:00 crc kubenswrapper[5002]: I0930 12:39:00.177666 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/41e74a80-7481-4f57-a850-4ec8869b051b-scripts" (OuterVolumeSpecName: "scripts") pod "41e74a80-7481-4f57-a850-4ec8869b051b" (UID: "41e74a80-7481-4f57-a850-4ec8869b051b"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:39:00 crc kubenswrapper[5002]: I0930 12:39:00.193637 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/41e74a80-7481-4f57-a850-4ec8869b051b-kube-api-access-7v8z6" (OuterVolumeSpecName: "kube-api-access-7v8z6") pod "41e74a80-7481-4f57-a850-4ec8869b051b" (UID: "41e74a80-7481-4f57-a850-4ec8869b051b"). InnerVolumeSpecName "kube-api-access-7v8z6". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 12:39:00 crc kubenswrapper[5002]: I0930 12:39:00.198099 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/41e74a80-7481-4f57-a850-4ec8869b051b-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "41e74a80-7481-4f57-a850-4ec8869b051b" (UID: "41e74a80-7481-4f57-a850-4ec8869b051b"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:39:00 crc kubenswrapper[5002]: I0930 12:39:00.207659 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/37740f37-5646-4a17-9356-12430b5fc45f-config-data" (OuterVolumeSpecName: "config-data") pod "37740f37-5646-4a17-9356-12430b5fc45f" (UID: "37740f37-5646-4a17-9356-12430b5fc45f"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:39:00 crc kubenswrapper[5002]: I0930 12:39:00.212779 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/37740f37-5646-4a17-9356-12430b5fc45f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "37740f37-5646-4a17-9356-12430b5fc45f" (UID: "37740f37-5646-4a17-9356-12430b5fc45f"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:39:00 crc kubenswrapper[5002]: I0930 12:39:00.266976 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/41e74a80-7481-4f57-a850-4ec8869b051b-config-data" (OuterVolumeSpecName: "config-data") pod "41e74a80-7481-4f57-a850-4ec8869b051b" (UID: "41e74a80-7481-4f57-a850-4ec8869b051b"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:39:00 crc kubenswrapper[5002]: I0930 12:39:00.269195 5002 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/41e74a80-7481-4f57-a850-4ec8869b051b-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Sep 30 12:39:00 crc kubenswrapper[5002]: I0930 12:39:00.269236 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7v8z6\" (UniqueName: \"kubernetes.io/projected/41e74a80-7481-4f57-a850-4ec8869b051b-kube-api-access-7v8z6\") on node \"crc\" DevicePath \"\"" Sep 30 12:39:00 crc kubenswrapper[5002]: I0930 12:39:00.269255 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hd7kj\" (UniqueName: \"kubernetes.io/projected/37740f37-5646-4a17-9356-12430b5fc45f-kube-api-access-hd7kj\") on node \"crc\" DevicePath \"\"" Sep 30 12:39:00 crc kubenswrapper[5002]: I0930 12:39:00.269267 5002 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/41e74a80-7481-4f57-a850-4ec8869b051b-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 12:39:00 crc kubenswrapper[5002]: I0930 12:39:00.269278 5002 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/41e74a80-7481-4f57-a850-4ec8869b051b-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 12:39:00 crc kubenswrapper[5002]: I0930 12:39:00.269290 5002 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/37740f37-5646-4a17-9356-12430b5fc45f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 12:39:00 crc kubenswrapper[5002]: I0930 12:39:00.269300 5002 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/37740f37-5646-4a17-9356-12430b5fc45f-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 12:39:00 crc kubenswrapper[5002]: I0930 12:39:00.277118 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/41e74a80-7481-4f57-a850-4ec8869b051b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "41e74a80-7481-4f57-a850-4ec8869b051b" (UID: "41e74a80-7481-4f57-a850-4ec8869b051b"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:39:00 crc kubenswrapper[5002]: I0930 12:39:00.371546 5002 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/41e74a80-7481-4f57-a850-4ec8869b051b-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 12:39:00 crc kubenswrapper[5002]: I0930 12:39:00.815586 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"41e74a80-7481-4f57-a850-4ec8869b051b","Type":"ContainerDied","Data":"774cf643b3032390b0cb9d20fedbfcf43d9f5582a3bb4356e3e95d263c6e0a7b"} Sep 30 12:39:00 crc kubenswrapper[5002]: I0930 12:39:00.815651 5002 scope.go:117] "RemoveContainer" containerID="9a7a7bd43ec94f061b74be5256ae7d5f73436f3afbe2eb25a0adc0818e90f18d" Sep 30 12:39:00 crc kubenswrapper[5002]: I0930 12:39:00.815771 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Sep 30 12:39:00 crc kubenswrapper[5002]: I0930 12:39:00.818906 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"37740f37-5646-4a17-9356-12430b5fc45f","Type":"ContainerDied","Data":"ee17b08681d8a883890e89f18dee50767abc9de4961f0b7ba8fe32a87902f73a"} Sep 30 12:39:00 crc kubenswrapper[5002]: I0930 12:39:00.818959 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Sep 30 12:39:00 crc kubenswrapper[5002]: I0930 12:39:00.863573 5002 scope.go:117] "RemoveContainer" containerID="1f8a359691d288f18fa9853d1752e24eefbc6a9bf7fa0dc0b6eb200dded6f486" Sep 30 12:39:00 crc kubenswrapper[5002]: I0930 12:39:00.866989 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Sep 30 12:39:00 crc kubenswrapper[5002]: I0930 12:39:00.896794 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Sep 30 12:39:00 crc kubenswrapper[5002]: I0930 12:39:00.900649 5002 scope.go:117] "RemoveContainer" containerID="7cbec1b5a6de0c9ec3905aa36e7aa394ce1e87f2c0e245bc9ad3cbc819aa98c2" Sep 30 12:39:00 crc kubenswrapper[5002]: I0930 12:39:00.904695 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-0"] Sep 30 12:39:00 crc kubenswrapper[5002]: I0930 12:39:00.913049 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-conductor-0"] Sep 30 12:39:00 crc kubenswrapper[5002]: I0930 12:39:00.922638 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Sep 30 12:39:00 crc kubenswrapper[5002]: E0930 12:39:00.923284 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="41e74a80-7481-4f57-a850-4ec8869b051b" containerName="sg-core" Sep 30 12:39:00 crc kubenswrapper[5002]: I0930 12:39:00.923304 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="41e74a80-7481-4f57-a850-4ec8869b051b" containerName="sg-core" Sep 30 12:39:00 crc kubenswrapper[5002]: E0930 12:39:00.923320 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="41e74a80-7481-4f57-a850-4ec8869b051b" containerName="ceilometer-central-agent" Sep 30 12:39:00 crc kubenswrapper[5002]: I0930 12:39:00.923354 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="41e74a80-7481-4f57-a850-4ec8869b051b" containerName="ceilometer-central-agent" Sep 30 12:39:00 crc kubenswrapper[5002]: E0930 12:39:00.923373 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="41e74a80-7481-4f57-a850-4ec8869b051b" containerName="ceilometer-notification-agent" Sep 30 12:39:00 crc kubenswrapper[5002]: I0930 12:39:00.923382 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="41e74a80-7481-4f57-a850-4ec8869b051b" containerName="ceilometer-notification-agent" Sep 30 12:39:00 crc kubenswrapper[5002]: E0930 12:39:00.923448 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="41e74a80-7481-4f57-a850-4ec8869b051b" containerName="proxy-httpd" Sep 30 12:39:00 crc kubenswrapper[5002]: I0930 12:39:00.923458 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="41e74a80-7481-4f57-a850-4ec8869b051b" containerName="proxy-httpd" Sep 30 12:39:00 crc kubenswrapper[5002]: E0930 12:39:00.923516 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="37740f37-5646-4a17-9356-12430b5fc45f" containerName="nova-cell0-conductor-conductor" Sep 30 12:39:00 crc kubenswrapper[5002]: I0930 12:39:00.923527 
5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="37740f37-5646-4a17-9356-12430b5fc45f" containerName="nova-cell0-conductor-conductor" Sep 30 12:39:00 crc kubenswrapper[5002]: I0930 12:39:00.923862 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="41e74a80-7481-4f57-a850-4ec8869b051b" containerName="proxy-httpd" Sep 30 12:39:00 crc kubenswrapper[5002]: I0930 12:39:00.923885 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="41e74a80-7481-4f57-a850-4ec8869b051b" containerName="ceilometer-central-agent" Sep 30 12:39:00 crc kubenswrapper[5002]: I0930 12:39:00.923930 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="41e74a80-7481-4f57-a850-4ec8869b051b" containerName="ceilometer-notification-agent" Sep 30 12:39:00 crc kubenswrapper[5002]: I0930 12:39:00.923951 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="37740f37-5646-4a17-9356-12430b5fc45f" containerName="nova-cell0-conductor-conductor" Sep 30 12:39:00 crc kubenswrapper[5002]: I0930 12:39:00.923965 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="41e74a80-7481-4f57-a850-4ec8869b051b" containerName="sg-core" Sep 30 12:39:00 crc kubenswrapper[5002]: I0930 12:39:00.926239 5002 scope.go:117] "RemoveContainer" containerID="10d4e31d5501f0c51057168196c67336f97b4a5261efb2b0a45e4a69efd2e5f2" Sep 30 12:39:00 crc kubenswrapper[5002]: I0930 12:39:00.927290 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 30 12:39:00 crc kubenswrapper[5002]: I0930 12:39:00.930476 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-0"] Sep 30 12:39:00 crc kubenswrapper[5002]: I0930 12:39:00.930766 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Sep 30 12:39:00 crc kubenswrapper[5002]: I0930 12:39:00.930981 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Sep 30 12:39:00 crc kubenswrapper[5002]: I0930 12:39:00.931973 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-0" Sep 30 12:39:00 crc kubenswrapper[5002]: I0930 12:39:00.934679 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-tdv86" Sep 30 12:39:00 crc kubenswrapper[5002]: I0930 12:39:00.934691 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Sep 30 12:39:00 crc kubenswrapper[5002]: I0930 12:39:00.940694 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 30 12:39:00 crc kubenswrapper[5002]: I0930 12:39:00.950285 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Sep 30 12:39:00 crc kubenswrapper[5002]: I0930 12:39:00.957190 5002 scope.go:117] "RemoveContainer" containerID="65179b3898ef52603d11103e6ea6e668fd3a8c59ca34a54aa70d9f60dfd06915" Sep 30 12:39:00 crc kubenswrapper[5002]: I0930 12:39:00.989079 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f36c00a3-1b8e-4f69-8690-7f91b49eb8e1-run-httpd\") pod \"ceilometer-0\" (UID: \"f36c00a3-1b8e-4f69-8690-7f91b49eb8e1\") " pod="openstack/ceilometer-0" Sep 30 12:39:00 crc kubenswrapper[5002]: I0930 12:39:00.989210 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/972b29bd-a22c-486e-ac29-6d075c3b26a7-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"972b29bd-a22c-486e-ac29-6d075c3b26a7\") " pod="openstack/nova-cell0-conductor-0" Sep 30 12:39:00 crc kubenswrapper[5002]: I0930 12:39:00.989533 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f36c00a3-1b8e-4f69-8690-7f91b49eb8e1-config-data\") pod \"ceilometer-0\" (UID: \"f36c00a3-1b8e-4f69-8690-7f91b49eb8e1\") " pod="openstack/ceilometer-0" Sep 30 12:39:00 crc kubenswrapper[5002]: I0930 12:39:00.989566 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f36c00a3-1b8e-4f69-8690-7f91b49eb8e1-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"f36c00a3-1b8e-4f69-8690-7f91b49eb8e1\") " pod="openstack/ceilometer-0" Sep 30 12:39:00 crc kubenswrapper[5002]: I0930 12:39:00.989636 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f36c00a3-1b8e-4f69-8690-7f91b49eb8e1-scripts\") pod \"ceilometer-0\" (UID: \"f36c00a3-1b8e-4f69-8690-7f91b49eb8e1\") " pod="openstack/ceilometer-0" Sep 30 12:39:00 crc kubenswrapper[5002]: I0930 12:39:00.989744 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/972b29bd-a22c-486e-ac29-6d075c3b26a7-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"972b29bd-a22c-486e-ac29-6d075c3b26a7\") " pod="openstack/nova-cell0-conductor-0" Sep 30 12:39:00 crc kubenswrapper[5002]: I0930 12:39:00.989776 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fgp2l\" (UniqueName: \"kubernetes.io/projected/972b29bd-a22c-486e-ac29-6d075c3b26a7-kube-api-access-fgp2l\") pod \"nova-cell0-conductor-0\" (UID: \"972b29bd-a22c-486e-ac29-6d075c3b26a7\") " 
pod="openstack/nova-cell0-conductor-0" Sep 30 12:39:00 crc kubenswrapper[5002]: I0930 12:39:00.989847 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f36c00a3-1b8e-4f69-8690-7f91b49eb8e1-log-httpd\") pod \"ceilometer-0\" (UID: \"f36c00a3-1b8e-4f69-8690-7f91b49eb8e1\") " pod="openstack/ceilometer-0" Sep 30 12:39:00 crc kubenswrapper[5002]: I0930 12:39:00.989876 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/f36c00a3-1b8e-4f69-8690-7f91b49eb8e1-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"f36c00a3-1b8e-4f69-8690-7f91b49eb8e1\") " pod="openstack/ceilometer-0" Sep 30 12:39:00 crc kubenswrapper[5002]: I0930 12:39:00.990088 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-frjjk\" (UniqueName: \"kubernetes.io/projected/f36c00a3-1b8e-4f69-8690-7f91b49eb8e1-kube-api-access-frjjk\") pod \"ceilometer-0\" (UID: \"f36c00a3-1b8e-4f69-8690-7f91b49eb8e1\") " pod="openstack/ceilometer-0" Sep 30 12:39:01 crc kubenswrapper[5002]: I0930 12:39:01.091614 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-frjjk\" (UniqueName: \"kubernetes.io/projected/f36c00a3-1b8e-4f69-8690-7f91b49eb8e1-kube-api-access-frjjk\") pod \"ceilometer-0\" (UID: \"f36c00a3-1b8e-4f69-8690-7f91b49eb8e1\") " pod="openstack/ceilometer-0" Sep 30 12:39:01 crc kubenswrapper[5002]: I0930 12:39:01.091708 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f36c00a3-1b8e-4f69-8690-7f91b49eb8e1-run-httpd\") pod \"ceilometer-0\" (UID: \"f36c00a3-1b8e-4f69-8690-7f91b49eb8e1\") " pod="openstack/ceilometer-0" Sep 30 12:39:01 crc kubenswrapper[5002]: I0930 12:39:01.091758 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/972b29bd-a22c-486e-ac29-6d075c3b26a7-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"972b29bd-a22c-486e-ac29-6d075c3b26a7\") " pod="openstack/nova-cell0-conductor-0" Sep 30 12:39:01 crc kubenswrapper[5002]: I0930 12:39:01.091821 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f36c00a3-1b8e-4f69-8690-7f91b49eb8e1-config-data\") pod \"ceilometer-0\" (UID: \"f36c00a3-1b8e-4f69-8690-7f91b49eb8e1\") " pod="openstack/ceilometer-0" Sep 30 12:39:01 crc kubenswrapper[5002]: I0930 12:39:01.091839 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f36c00a3-1b8e-4f69-8690-7f91b49eb8e1-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"f36c00a3-1b8e-4f69-8690-7f91b49eb8e1\") " pod="openstack/ceilometer-0" Sep 30 12:39:01 crc kubenswrapper[5002]: I0930 12:39:01.091889 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f36c00a3-1b8e-4f69-8690-7f91b49eb8e1-scripts\") pod \"ceilometer-0\" (UID: \"f36c00a3-1b8e-4f69-8690-7f91b49eb8e1\") " pod="openstack/ceilometer-0" Sep 30 12:39:01 crc kubenswrapper[5002]: I0930 12:39:01.091929 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/972b29bd-a22c-486e-ac29-6d075c3b26a7-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"972b29bd-a22c-486e-ac29-6d075c3b26a7\") " pod="openstack/nova-cell0-conductor-0" Sep 30 12:39:01 crc kubenswrapper[5002]: I0930 12:39:01.091963 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fgp2l\" (UniqueName: \"kubernetes.io/projected/972b29bd-a22c-486e-ac29-6d075c3b26a7-kube-api-access-fgp2l\") pod \"nova-cell0-conductor-0\" (UID: \"972b29bd-a22c-486e-ac29-6d075c3b26a7\") " pod="openstack/nova-cell0-conductor-0" Sep 30 12:39:01 crc kubenswrapper[5002]: I0930 12:39:01.091996 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f36c00a3-1b8e-4f69-8690-7f91b49eb8e1-log-httpd\") pod \"ceilometer-0\" (UID: \"f36c00a3-1b8e-4f69-8690-7f91b49eb8e1\") " pod="openstack/ceilometer-0" Sep 30 12:39:01 crc kubenswrapper[5002]: I0930 12:39:01.092016 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/f36c00a3-1b8e-4f69-8690-7f91b49eb8e1-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"f36c00a3-1b8e-4f69-8690-7f91b49eb8e1\") " pod="openstack/ceilometer-0" Sep 30 12:39:01 crc kubenswrapper[5002]: I0930 12:39:01.092776 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f36c00a3-1b8e-4f69-8690-7f91b49eb8e1-run-httpd\") pod \"ceilometer-0\" (UID: \"f36c00a3-1b8e-4f69-8690-7f91b49eb8e1\") " pod="openstack/ceilometer-0" Sep 30 12:39:01 crc kubenswrapper[5002]: I0930 12:39:01.093371 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f36c00a3-1b8e-4f69-8690-7f91b49eb8e1-log-httpd\") pod \"ceilometer-0\" (UID: \"f36c00a3-1b8e-4f69-8690-7f91b49eb8e1\") " pod="openstack/ceilometer-0" Sep 30 12:39:01 crc kubenswrapper[5002]: I0930 12:39:01.096871 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f36c00a3-1b8e-4f69-8690-7f91b49eb8e1-config-data\") pod \"ceilometer-0\" (UID: \"f36c00a3-1b8e-4f69-8690-7f91b49eb8e1\") " pod="openstack/ceilometer-0" Sep 30 12:39:01 crc kubenswrapper[5002]: I0930 12:39:01.097117 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/972b29bd-a22c-486e-ac29-6d075c3b26a7-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"972b29bd-a22c-486e-ac29-6d075c3b26a7\") " pod="openstack/nova-cell0-conductor-0" Sep 30 12:39:01 crc kubenswrapper[5002]: I0930 12:39:01.098056 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/f36c00a3-1b8e-4f69-8690-7f91b49eb8e1-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"f36c00a3-1b8e-4f69-8690-7f91b49eb8e1\") " pod="openstack/ceilometer-0" Sep 30 12:39:01 crc kubenswrapper[5002]: I0930 12:39:01.100269 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f36c00a3-1b8e-4f69-8690-7f91b49eb8e1-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"f36c00a3-1b8e-4f69-8690-7f91b49eb8e1\") " pod="openstack/ceilometer-0" Sep 30 12:39:01 crc kubenswrapper[5002]: I0930 12:39:01.111194 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/972b29bd-a22c-486e-ac29-6d075c3b26a7-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"972b29bd-a22c-486e-ac29-6d075c3b26a7\") " pod="openstack/nova-cell0-conductor-0" Sep 30 12:39:01 crc kubenswrapper[5002]: I0930 12:39:01.112871 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f36c00a3-1b8e-4f69-8690-7f91b49eb8e1-scripts\") pod \"ceilometer-0\" (UID: \"f36c00a3-1b8e-4f69-8690-7f91b49eb8e1\") " pod="openstack/ceilometer-0" Sep 30 12:39:01 crc kubenswrapper[5002]: I0930 12:39:01.117777 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fgp2l\" (UniqueName: \"kubernetes.io/projected/972b29bd-a22c-486e-ac29-6d075c3b26a7-kube-api-access-fgp2l\") pod \"nova-cell0-conductor-0\" (UID: \"972b29bd-a22c-486e-ac29-6d075c3b26a7\") " pod="openstack/nova-cell0-conductor-0" Sep 30 12:39:01 crc kubenswrapper[5002]: I0930 12:39:01.119584 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-frjjk\" (UniqueName: \"kubernetes.io/projected/f36c00a3-1b8e-4f69-8690-7f91b49eb8e1-kube-api-access-frjjk\") pod \"ceilometer-0\" (UID: \"f36c00a3-1b8e-4f69-8690-7f91b49eb8e1\") " pod="openstack/ceilometer-0" Sep 30 12:39:01 crc kubenswrapper[5002]: I0930 12:39:01.259276 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 30 12:39:01 crc kubenswrapper[5002]: I0930 12:39:01.272307 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Sep 30 12:39:01 crc kubenswrapper[5002]: I0930 12:39:01.734213 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 30 12:39:01 crc kubenswrapper[5002]: I0930 12:39:01.810515 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Sep 30 12:39:01 crc kubenswrapper[5002]: I0930 12:39:01.835959 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"972b29bd-a22c-486e-ac29-6d075c3b26a7","Type":"ContainerStarted","Data":"187e3308b1ee11b5edc11b27923dc45ff1c8a9e84243633440aa609b9ac03e16"} Sep 30 12:39:01 crc kubenswrapper[5002]: I0930 12:39:01.836834 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f36c00a3-1b8e-4f69-8690-7f91b49eb8e1","Type":"ContainerStarted","Data":"d2ede50b14c6db96ded7e994beb6cedf59a961c04a05d967c99f1ae546ab5534"} Sep 30 12:39:02 crc kubenswrapper[5002]: I0930 12:39:02.098014 5002 patch_prober.go:28] interesting pod/machine-config-daemon-ncbb5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 12:39:02 crc kubenswrapper[5002]: I0930 12:39:02.098382 5002 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" podUID="341a55c6-78d3-4fa2-8f47-b56fd41fa1c1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 12:39:02 crc kubenswrapper[5002]: I0930 12:39:02.098421 5002 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" Sep 30 12:39:02 crc kubenswrapper[5002]: I0930 
12:39:02.099091 5002 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"9886eed05d26a67a26421b788732fc6013ba20fcc19e4bc6732960e19d9a03e8"} pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 30 12:39:02 crc kubenswrapper[5002]: I0930 12:39:02.099146 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" podUID="341a55c6-78d3-4fa2-8f47-b56fd41fa1c1" containerName="machine-config-daemon" containerID="cri-o://9886eed05d26a67a26421b788732fc6013ba20fcc19e4bc6732960e19d9a03e8" gracePeriod=600 Sep 30 12:39:02 crc kubenswrapper[5002]: I0930 12:39:02.714367 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="37740f37-5646-4a17-9356-12430b5fc45f" path="/var/lib/kubelet/pods/37740f37-5646-4a17-9356-12430b5fc45f/volumes" Sep 30 12:39:02 crc kubenswrapper[5002]: I0930 12:39:02.715978 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="41e74a80-7481-4f57-a850-4ec8869b051b" path="/var/lib/kubelet/pods/41e74a80-7481-4f57-a850-4ec8869b051b/volumes" Sep 30 12:39:02 crc kubenswrapper[5002]: I0930 12:39:02.847004 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"972b29bd-a22c-486e-ac29-6d075c3b26a7","Type":"ContainerStarted","Data":"68fa6f3e16c999aceab013c26f441365f2cf9fe50aa9f39ccbb8d529c4d5f137"} Sep 30 12:39:02 crc kubenswrapper[5002]: I0930 12:39:02.847508 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell0-conductor-0" Sep 30 12:39:02 crc kubenswrapper[5002]: I0930 12:39:02.849454 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f36c00a3-1b8e-4f69-8690-7f91b49eb8e1","Type":"ContainerStarted","Data":"5e3451bb7d4e6357b90125dad5f9bd0db99e24a04ca4dd761c012c386e5e12fa"} Sep 30 12:39:02 crc kubenswrapper[5002]: I0930 12:39:02.851872 5002 generic.go:334] "Generic (PLEG): container finished" podID="341a55c6-78d3-4fa2-8f47-b56fd41fa1c1" containerID="9886eed05d26a67a26421b788732fc6013ba20fcc19e4bc6732960e19d9a03e8" exitCode=0 Sep 30 12:39:02 crc kubenswrapper[5002]: I0930 12:39:02.851950 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" event={"ID":"341a55c6-78d3-4fa2-8f47-b56fd41fa1c1","Type":"ContainerDied","Data":"9886eed05d26a67a26421b788732fc6013ba20fcc19e4bc6732960e19d9a03e8"} Sep 30 12:39:02 crc kubenswrapper[5002]: I0930 12:39:02.852406 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" event={"ID":"341a55c6-78d3-4fa2-8f47-b56fd41fa1c1","Type":"ContainerStarted","Data":"5f3022bd0e514b5d4d606e295722375190cfc36100f55199d7e8623cc30f07d7"} Sep 30 12:39:02 crc kubenswrapper[5002]: I0930 12:39:02.852453 5002 scope.go:117] "RemoveContainer" containerID="c1160f56b0e9b4bf2e03936ba3d26a4bdb21744142f5cdbd024372eb9b5cd6dd" Sep 30 12:39:02 crc kubenswrapper[5002]: I0930 12:39:02.866958 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-0" podStartSLOduration=2.866938914 podStartE2EDuration="2.866938914s" podCreationTimestamp="2025-09-30 12:39:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 
+0000 UTC" observedRunningTime="2025-09-30 12:39:02.863361515 +0000 UTC m=+1117.113043671" watchObservedRunningTime="2025-09-30 12:39:02.866938914 +0000 UTC m=+1117.116621060" Sep 30 12:39:03 crc kubenswrapper[5002]: I0930 12:39:03.874951 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f36c00a3-1b8e-4f69-8690-7f91b49eb8e1","Type":"ContainerStarted","Data":"9f4340c61476e7e180916ae426077ec083f5f78303aab7b1bea55eb501713d9c"} Sep 30 12:39:04 crc kubenswrapper[5002]: I0930 12:39:04.898662 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f36c00a3-1b8e-4f69-8690-7f91b49eb8e1","Type":"ContainerStarted","Data":"ceb447eb0439373b34f517918bce85b8fca65d503059fe5dac95e6792c06a058"} Sep 30 12:39:05 crc kubenswrapper[5002]: I0930 12:39:05.912199 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f36c00a3-1b8e-4f69-8690-7f91b49eb8e1","Type":"ContainerStarted","Data":"3a1c14dcb593b6a010604ddc317205c2511f3c4513f3967c31e4515dde283f42"} Sep 30 12:39:05 crc kubenswrapper[5002]: I0930 12:39:05.912847 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Sep 30 12:39:05 crc kubenswrapper[5002]: I0930 12:39:05.948705 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.274813716 podStartE2EDuration="5.9482089s" podCreationTimestamp="2025-09-30 12:39:00 +0000 UTC" firstStartedPulling="2025-09-30 12:39:01.743058066 +0000 UTC m=+1115.992740212" lastFinishedPulling="2025-09-30 12:39:05.41645326 +0000 UTC m=+1119.666135396" observedRunningTime="2025-09-30 12:39:05.93876125 +0000 UTC m=+1120.188443406" watchObservedRunningTime="2025-09-30 12:39:05.9482089 +0000 UTC m=+1120.197891066" Sep 30 12:39:11 crc kubenswrapper[5002]: I0930 12:39:11.320557 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell0-conductor-0" Sep 30 12:39:11 crc kubenswrapper[5002]: I0930 12:39:11.970316 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-cell-mapping-8kcwc"] Sep 30 12:39:11 crc kubenswrapper[5002]: I0930 12:39:11.972385 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-cell-mapping-8kcwc" Sep 30 12:39:11 crc kubenswrapper[5002]: I0930 12:39:11.979964 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-scripts" Sep 30 12:39:11 crc kubenswrapper[5002]: I0930 12:39:11.979966 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-config-data" Sep 30 12:39:11 crc kubenswrapper[5002]: I0930 12:39:11.993821 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-8kcwc"] Sep 30 12:39:12 crc kubenswrapper[5002]: I0930 12:39:12.094908 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cd4d414b-cd09-4feb-913f-174f17996cd1-config-data\") pod \"nova-cell0-cell-mapping-8kcwc\" (UID: \"cd4d414b-cd09-4feb-913f-174f17996cd1\") " pod="openstack/nova-cell0-cell-mapping-8kcwc" Sep 30 12:39:12 crc kubenswrapper[5002]: I0930 12:39:12.094981 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cd4d414b-cd09-4feb-913f-174f17996cd1-scripts\") pod \"nova-cell0-cell-mapping-8kcwc\" (UID: \"cd4d414b-cd09-4feb-913f-174f17996cd1\") " pod="openstack/nova-cell0-cell-mapping-8kcwc" Sep 30 12:39:12 crc kubenswrapper[5002]: I0930 12:39:12.095056 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cd4d414b-cd09-4feb-913f-174f17996cd1-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-8kcwc\" (UID: \"cd4d414b-cd09-4feb-913f-174f17996cd1\") " pod="openstack/nova-cell0-cell-mapping-8kcwc" Sep 30 12:39:12 crc kubenswrapper[5002]: I0930 12:39:12.095119 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bcs25\" (UniqueName: \"kubernetes.io/projected/cd4d414b-cd09-4feb-913f-174f17996cd1-kube-api-access-bcs25\") pod \"nova-cell0-cell-mapping-8kcwc\" (UID: \"cd4d414b-cd09-4feb-913f-174f17996cd1\") " pod="openstack/nova-cell0-cell-mapping-8kcwc" Sep 30 12:39:12 crc kubenswrapper[5002]: I0930 12:39:12.119510 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Sep 30 12:39:12 crc kubenswrapper[5002]: I0930 12:39:12.125901 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Sep 30 12:39:12 crc kubenswrapper[5002]: I0930 12:39:12.127976 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Sep 30 12:39:12 crc kubenswrapper[5002]: I0930 12:39:12.140577 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Sep 30 12:39:12 crc kubenswrapper[5002]: I0930 12:39:12.193448 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Sep 30 12:39:12 crc kubenswrapper[5002]: I0930 12:39:12.194649 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Sep 30 12:39:12 crc kubenswrapper[5002]: I0930 12:39:12.196629 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bcs25\" (UniqueName: \"kubernetes.io/projected/cd4d414b-cd09-4feb-913f-174f17996cd1-kube-api-access-bcs25\") pod \"nova-cell0-cell-mapping-8kcwc\" (UID: \"cd4d414b-cd09-4feb-913f-174f17996cd1\") " pod="openstack/nova-cell0-cell-mapping-8kcwc" Sep 30 12:39:12 crc kubenswrapper[5002]: I0930 12:39:12.196705 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f6bdc68b-5ea5-468d-bd40-e0c04652a581-config-data\") pod \"nova-api-0\" (UID: \"f6bdc68b-5ea5-468d-bd40-e0c04652a581\") " pod="openstack/nova-api-0" Sep 30 12:39:12 crc kubenswrapper[5002]: I0930 12:39:12.196743 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r5dgq\" (UniqueName: \"kubernetes.io/projected/f6bdc68b-5ea5-468d-bd40-e0c04652a581-kube-api-access-r5dgq\") pod \"nova-api-0\" (UID: \"f6bdc68b-5ea5-468d-bd40-e0c04652a581\") " pod="openstack/nova-api-0" Sep 30 12:39:12 crc kubenswrapper[5002]: I0930 12:39:12.196765 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f6bdc68b-5ea5-468d-bd40-e0c04652a581-logs\") pod \"nova-api-0\" (UID: \"f6bdc68b-5ea5-468d-bd40-e0c04652a581\") " pod="openstack/nova-api-0" Sep 30 12:39:12 crc kubenswrapper[5002]: I0930 12:39:12.196805 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f6bdc68b-5ea5-468d-bd40-e0c04652a581-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"f6bdc68b-5ea5-468d-bd40-e0c04652a581\") " pod="openstack/nova-api-0" Sep 30 12:39:12 crc kubenswrapper[5002]: I0930 12:39:12.196853 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cd4d414b-cd09-4feb-913f-174f17996cd1-config-data\") pod \"nova-cell0-cell-mapping-8kcwc\" (UID: \"cd4d414b-cd09-4feb-913f-174f17996cd1\") " pod="openstack/nova-cell0-cell-mapping-8kcwc" Sep 30 12:39:12 crc kubenswrapper[5002]: I0930 12:39:12.196887 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cd4d414b-cd09-4feb-913f-174f17996cd1-scripts\") pod \"nova-cell0-cell-mapping-8kcwc\" (UID: \"cd4d414b-cd09-4feb-913f-174f17996cd1\") " pod="openstack/nova-cell0-cell-mapping-8kcwc" Sep 30 12:39:12 crc kubenswrapper[5002]: I0930 12:39:12.196915 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cd4d414b-cd09-4feb-913f-174f17996cd1-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-8kcwc\" (UID: \"cd4d414b-cd09-4feb-913f-174f17996cd1\") " pod="openstack/nova-cell0-cell-mapping-8kcwc" Sep 30 12:39:12 crc kubenswrapper[5002]: I0930 12:39:12.202437 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data" Sep 30 12:39:12 crc kubenswrapper[5002]: I0930 12:39:12.203097 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cd4d414b-cd09-4feb-913f-174f17996cd1-combined-ca-bundle\") pod 
\"nova-cell0-cell-mapping-8kcwc\" (UID: \"cd4d414b-cd09-4feb-913f-174f17996cd1\") " pod="openstack/nova-cell0-cell-mapping-8kcwc" Sep 30 12:39:12 crc kubenswrapper[5002]: I0930 12:39:12.206231 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cd4d414b-cd09-4feb-913f-174f17996cd1-config-data\") pod \"nova-cell0-cell-mapping-8kcwc\" (UID: \"cd4d414b-cd09-4feb-913f-174f17996cd1\") " pod="openstack/nova-cell0-cell-mapping-8kcwc" Sep 30 12:39:12 crc kubenswrapper[5002]: I0930 12:39:12.210277 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cd4d414b-cd09-4feb-913f-174f17996cd1-scripts\") pod \"nova-cell0-cell-mapping-8kcwc\" (UID: \"cd4d414b-cd09-4feb-913f-174f17996cd1\") " pod="openstack/nova-cell0-cell-mapping-8kcwc" Sep 30 12:39:12 crc kubenswrapper[5002]: I0930 12:39:12.214837 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Sep 30 12:39:12 crc kubenswrapper[5002]: I0930 12:39:12.226338 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bcs25\" (UniqueName: \"kubernetes.io/projected/cd4d414b-cd09-4feb-913f-174f17996cd1-kube-api-access-bcs25\") pod \"nova-cell0-cell-mapping-8kcwc\" (UID: \"cd4d414b-cd09-4feb-913f-174f17996cd1\") " pod="openstack/nova-cell0-cell-mapping-8kcwc" Sep 30 12:39:12 crc kubenswrapper[5002]: I0930 12:39:12.298097 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f6bdc68b-5ea5-468d-bd40-e0c04652a581-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"f6bdc68b-5ea5-468d-bd40-e0c04652a581\") " pod="openstack/nova-api-0" Sep 30 12:39:12 crc kubenswrapper[5002]: I0930 12:39:12.298166 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tmmwj\" (UniqueName: \"kubernetes.io/projected/7d675115-dfa8-46a9-8fdf-b9c5e6ef2d09-kube-api-access-tmmwj\") pod \"nova-cell1-novncproxy-0\" (UID: \"7d675115-dfa8-46a9-8fdf-b9c5e6ef2d09\") " pod="openstack/nova-cell1-novncproxy-0" Sep 30 12:39:12 crc kubenswrapper[5002]: I0930 12:39:12.298241 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7d675115-dfa8-46a9-8fdf-b9c5e6ef2d09-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"7d675115-dfa8-46a9-8fdf-b9c5e6ef2d09\") " pod="openstack/nova-cell1-novncproxy-0" Sep 30 12:39:12 crc kubenswrapper[5002]: I0930 12:39:12.298270 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7d675115-dfa8-46a9-8fdf-b9c5e6ef2d09-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"7d675115-dfa8-46a9-8fdf-b9c5e6ef2d09\") " pod="openstack/nova-cell1-novncproxy-0" Sep 30 12:39:12 crc kubenswrapper[5002]: I0930 12:39:12.298328 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f6bdc68b-5ea5-468d-bd40-e0c04652a581-config-data\") pod \"nova-api-0\" (UID: \"f6bdc68b-5ea5-468d-bd40-e0c04652a581\") " pod="openstack/nova-api-0" Sep 30 12:39:12 crc kubenswrapper[5002]: I0930 12:39:12.298382 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r5dgq\" (UniqueName: 
\"kubernetes.io/projected/f6bdc68b-5ea5-468d-bd40-e0c04652a581-kube-api-access-r5dgq\") pod \"nova-api-0\" (UID: \"f6bdc68b-5ea5-468d-bd40-e0c04652a581\") " pod="openstack/nova-api-0" Sep 30 12:39:12 crc kubenswrapper[5002]: I0930 12:39:12.298412 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f6bdc68b-5ea5-468d-bd40-e0c04652a581-logs\") pod \"nova-api-0\" (UID: \"f6bdc68b-5ea5-468d-bd40-e0c04652a581\") " pod="openstack/nova-api-0" Sep 30 12:39:12 crc kubenswrapper[5002]: I0930 12:39:12.298973 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f6bdc68b-5ea5-468d-bd40-e0c04652a581-logs\") pod \"nova-api-0\" (UID: \"f6bdc68b-5ea5-468d-bd40-e0c04652a581\") " pod="openstack/nova-api-0" Sep 30 12:39:12 crc kubenswrapper[5002]: I0930 12:39:12.299353 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-8kcwc" Sep 30 12:39:12 crc kubenswrapper[5002]: I0930 12:39:12.303222 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f6bdc68b-5ea5-468d-bd40-e0c04652a581-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"f6bdc68b-5ea5-468d-bd40-e0c04652a581\") " pod="openstack/nova-api-0" Sep 30 12:39:12 crc kubenswrapper[5002]: I0930 12:39:12.307297 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f6bdc68b-5ea5-468d-bd40-e0c04652a581-config-data\") pod \"nova-api-0\" (UID: \"f6bdc68b-5ea5-468d-bd40-e0c04652a581\") " pod="openstack/nova-api-0" Sep 30 12:39:12 crc kubenswrapper[5002]: I0930 12:39:12.320669 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Sep 30 12:39:12 crc kubenswrapper[5002]: I0930 12:39:12.322194 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Sep 30 12:39:12 crc kubenswrapper[5002]: I0930 12:39:12.326907 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Sep 30 12:39:12 crc kubenswrapper[5002]: I0930 12:39:12.328015 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Sep 30 12:39:12 crc kubenswrapper[5002]: I0930 12:39:12.334213 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Sep 30 12:39:12 crc kubenswrapper[5002]: I0930 12:39:12.334403 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Sep 30 12:39:12 crc kubenswrapper[5002]: I0930 12:39:12.367149 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r5dgq\" (UniqueName: \"kubernetes.io/projected/f6bdc68b-5ea5-468d-bd40-e0c04652a581-kube-api-access-r5dgq\") pod \"nova-api-0\" (UID: \"f6bdc68b-5ea5-468d-bd40-e0c04652a581\") " pod="openstack/nova-api-0" Sep 30 12:39:12 crc kubenswrapper[5002]: I0930 12:39:12.374731 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Sep 30 12:39:12 crc kubenswrapper[5002]: I0930 12:39:12.393847 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Sep 30 12:39:12 crc kubenswrapper[5002]: I0930 12:39:12.401419 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d9c49bf4-915c-4efb-81ed-b8c7a393d371-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"d9c49bf4-915c-4efb-81ed-b8c7a393d371\") " pod="openstack/nova-scheduler-0" Sep 30 12:39:12 crc kubenswrapper[5002]: I0930 12:39:12.401529 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b186605c-aa7e-4131-8fa2-5e8d97789e8b-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"b186605c-aa7e-4131-8fa2-5e8d97789e8b\") " pod="openstack/nova-metadata-0" Sep 30 12:39:12 crc kubenswrapper[5002]: I0930 12:39:12.401548 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2pjf7\" (UniqueName: \"kubernetes.io/projected/d9c49bf4-915c-4efb-81ed-b8c7a393d371-kube-api-access-2pjf7\") pod \"nova-scheduler-0\" (UID: \"d9c49bf4-915c-4efb-81ed-b8c7a393d371\") " pod="openstack/nova-scheduler-0" Sep 30 12:39:12 crc kubenswrapper[5002]: I0930 12:39:12.401563 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d9c49bf4-915c-4efb-81ed-b8c7a393d371-config-data\") pod \"nova-scheduler-0\" (UID: \"d9c49bf4-915c-4efb-81ed-b8c7a393d371\") " pod="openstack/nova-scheduler-0" Sep 30 12:39:12 crc kubenswrapper[5002]: I0930 12:39:12.401585 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tmmwj\" (UniqueName: \"kubernetes.io/projected/7d675115-dfa8-46a9-8fdf-b9c5e6ef2d09-kube-api-access-tmmwj\") pod \"nova-cell1-novncproxy-0\" (UID: \"7d675115-dfa8-46a9-8fdf-b9c5e6ef2d09\") " pod="openstack/nova-cell1-novncproxy-0" Sep 30 12:39:12 crc kubenswrapper[5002]: I0930 12:39:12.401602 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b186605c-aa7e-4131-8fa2-5e8d97789e8b-config-data\") pod \"nova-metadata-0\" (UID: \"b186605c-aa7e-4131-8fa2-5e8d97789e8b\") " pod="openstack/nova-metadata-0" Sep 30 12:39:12 crc kubenswrapper[5002]: I0930 12:39:12.401676 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/7d675115-dfa8-46a9-8fdf-b9c5e6ef2d09-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"7d675115-dfa8-46a9-8fdf-b9c5e6ef2d09\") " pod="openstack/nova-cell1-novncproxy-0" Sep 30 12:39:12 crc kubenswrapper[5002]: I0930 12:39:12.401697 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b186605c-aa7e-4131-8fa2-5e8d97789e8b-logs\") pod \"nova-metadata-0\" (UID: \"b186605c-aa7e-4131-8fa2-5e8d97789e8b\") " pod="openstack/nova-metadata-0" Sep 30 12:39:12 crc kubenswrapper[5002]: I0930 12:39:12.401716 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pz7ss\" (UniqueName: \"kubernetes.io/projected/b186605c-aa7e-4131-8fa2-5e8d97789e8b-kube-api-access-pz7ss\") pod \"nova-metadata-0\" (UID: \"b186605c-aa7e-4131-8fa2-5e8d97789e8b\") " pod="openstack/nova-metadata-0" Sep 30 12:39:12 crc kubenswrapper[5002]: I0930 12:39:12.401740 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7d675115-dfa8-46a9-8fdf-b9c5e6ef2d09-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"7d675115-dfa8-46a9-8fdf-b9c5e6ef2d09\") " pod="openstack/nova-cell1-novncproxy-0" Sep 30 12:39:12 crc kubenswrapper[5002]: I0930 12:39:12.407863 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7d675115-dfa8-46a9-8fdf-b9c5e6ef2d09-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"7d675115-dfa8-46a9-8fdf-b9c5e6ef2d09\") " pod="openstack/nova-cell1-novncproxy-0" Sep 30 12:39:12 crc kubenswrapper[5002]: I0930 12:39:12.409869 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7d675115-dfa8-46a9-8fdf-b9c5e6ef2d09-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"7d675115-dfa8-46a9-8fdf-b9c5e6ef2d09\") " pod="openstack/nova-cell1-novncproxy-0" Sep 30 12:39:12 crc kubenswrapper[5002]: I0930 12:39:12.434125 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tmmwj\" (UniqueName: \"kubernetes.io/projected/7d675115-dfa8-46a9-8fdf-b9c5e6ef2d09-kube-api-access-tmmwj\") pod \"nova-cell1-novncproxy-0\" (UID: \"7d675115-dfa8-46a9-8fdf-b9c5e6ef2d09\") " pod="openstack/nova-cell1-novncproxy-0" Sep 30 12:39:12 crc kubenswrapper[5002]: I0930 12:39:12.443102 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Sep 30 12:39:12 crc kubenswrapper[5002]: I0930 12:39:12.451422 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-845d6d6f59-n66v8"] Sep 30 12:39:12 crc kubenswrapper[5002]: I0930 12:39:12.453090 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-845d6d6f59-n66v8" Sep 30 12:39:12 crc kubenswrapper[5002]: I0930 12:39:12.477408 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-845d6d6f59-n66v8"] Sep 30 12:39:12 crc kubenswrapper[5002]: I0930 12:39:12.483429 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Sep 30 12:39:12 crc kubenswrapper[5002]: I0930 12:39:12.503092 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b186605c-aa7e-4131-8fa2-5e8d97789e8b-logs\") pod \"nova-metadata-0\" (UID: \"b186605c-aa7e-4131-8fa2-5e8d97789e8b\") " pod="openstack/nova-metadata-0" Sep 30 12:39:12 crc kubenswrapper[5002]: I0930 12:39:12.503331 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pz7ss\" (UniqueName: \"kubernetes.io/projected/b186605c-aa7e-4131-8fa2-5e8d97789e8b-kube-api-access-pz7ss\") pod \"nova-metadata-0\" (UID: \"b186605c-aa7e-4131-8fa2-5e8d97789e8b\") " pod="openstack/nova-metadata-0" Sep 30 12:39:12 crc kubenswrapper[5002]: I0930 12:39:12.503488 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9aafee6a-2a42-4a59-abf5-9ac7cdd70aca-ovsdbserver-sb\") pod \"dnsmasq-dns-845d6d6f59-n66v8\" (UID: \"9aafee6a-2a42-4a59-abf5-9ac7cdd70aca\") " pod="openstack/dnsmasq-dns-845d6d6f59-n66v8" Sep 30 12:39:12 crc kubenswrapper[5002]: I0930 12:39:12.503590 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d9c49bf4-915c-4efb-81ed-b8c7a393d371-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"d9c49bf4-915c-4efb-81ed-b8c7a393d371\") " pod="openstack/nova-scheduler-0" Sep 30 12:39:12 crc kubenswrapper[5002]: I0930 12:39:12.503670 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9aafee6a-2a42-4a59-abf5-9ac7cdd70aca-dns-svc\") pod \"dnsmasq-dns-845d6d6f59-n66v8\" (UID: \"9aafee6a-2a42-4a59-abf5-9ac7cdd70aca\") " pod="openstack/dnsmasq-dns-845d6d6f59-n66v8" Sep 30 12:39:12 crc kubenswrapper[5002]: I0930 12:39:12.503820 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b186605c-aa7e-4131-8fa2-5e8d97789e8b-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"b186605c-aa7e-4131-8fa2-5e8d97789e8b\") " pod="openstack/nova-metadata-0" Sep 30 12:39:12 crc kubenswrapper[5002]: I0930 12:39:12.503897 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2pjf7\" (UniqueName: \"kubernetes.io/projected/d9c49bf4-915c-4efb-81ed-b8c7a393d371-kube-api-access-2pjf7\") pod \"nova-scheduler-0\" (UID: \"d9c49bf4-915c-4efb-81ed-b8c7a393d371\") " pod="openstack/nova-scheduler-0" Sep 30 12:39:12 crc kubenswrapper[5002]: I0930 12:39:12.503960 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d9c49bf4-915c-4efb-81ed-b8c7a393d371-config-data\") pod \"nova-scheduler-0\" (UID: \"d9c49bf4-915c-4efb-81ed-b8c7a393d371\") " pod="openstack/nova-scheduler-0" Sep 30 12:39:12 crc kubenswrapper[5002]: I0930 12:39:12.504039 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9aafee6a-2a42-4a59-abf5-9ac7cdd70aca-config\") pod \"dnsmasq-dns-845d6d6f59-n66v8\" (UID: \"9aafee6a-2a42-4a59-abf5-9ac7cdd70aca\") " pod="openstack/dnsmasq-dns-845d6d6f59-n66v8" Sep 30 12:39:12 crc kubenswrapper[5002]: I0930 12:39:12.504611 5002 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b186605c-aa7e-4131-8fa2-5e8d97789e8b-config-data\") pod \"nova-metadata-0\" (UID: \"b186605c-aa7e-4131-8fa2-5e8d97789e8b\") " pod="openstack/nova-metadata-0" Sep 30 12:39:12 crc kubenswrapper[5002]: I0930 12:39:12.504776 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/9aafee6a-2a42-4a59-abf5-9ac7cdd70aca-dns-swift-storage-0\") pod \"dnsmasq-dns-845d6d6f59-n66v8\" (UID: \"9aafee6a-2a42-4a59-abf5-9ac7cdd70aca\") " pod="openstack/dnsmasq-dns-845d6d6f59-n66v8" Sep 30 12:39:12 crc kubenswrapper[5002]: I0930 12:39:12.504894 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6k7rn\" (UniqueName: \"kubernetes.io/projected/9aafee6a-2a42-4a59-abf5-9ac7cdd70aca-kube-api-access-6k7rn\") pod \"dnsmasq-dns-845d6d6f59-n66v8\" (UID: \"9aafee6a-2a42-4a59-abf5-9ac7cdd70aca\") " pod="openstack/dnsmasq-dns-845d6d6f59-n66v8" Sep 30 12:39:12 crc kubenswrapper[5002]: I0930 12:39:12.505009 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9aafee6a-2a42-4a59-abf5-9ac7cdd70aca-ovsdbserver-nb\") pod \"dnsmasq-dns-845d6d6f59-n66v8\" (UID: \"9aafee6a-2a42-4a59-abf5-9ac7cdd70aca\") " pod="openstack/dnsmasq-dns-845d6d6f59-n66v8" Sep 30 12:39:12 crc kubenswrapper[5002]: I0930 12:39:12.506131 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b186605c-aa7e-4131-8fa2-5e8d97789e8b-logs\") pod \"nova-metadata-0\" (UID: \"b186605c-aa7e-4131-8fa2-5e8d97789e8b\") " pod="openstack/nova-metadata-0" Sep 30 12:39:12 crc kubenswrapper[5002]: I0930 12:39:12.512112 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d9c49bf4-915c-4efb-81ed-b8c7a393d371-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"d9c49bf4-915c-4efb-81ed-b8c7a393d371\") " pod="openstack/nova-scheduler-0" Sep 30 12:39:12 crc kubenswrapper[5002]: I0930 12:39:12.516537 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d9c49bf4-915c-4efb-81ed-b8c7a393d371-config-data\") pod \"nova-scheduler-0\" (UID: \"d9c49bf4-915c-4efb-81ed-b8c7a393d371\") " pod="openstack/nova-scheduler-0" Sep 30 12:39:12 crc kubenswrapper[5002]: I0930 12:39:12.521646 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b186605c-aa7e-4131-8fa2-5e8d97789e8b-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"b186605c-aa7e-4131-8fa2-5e8d97789e8b\") " pod="openstack/nova-metadata-0" Sep 30 12:39:12 crc kubenswrapper[5002]: I0930 12:39:12.525535 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2pjf7\" (UniqueName: \"kubernetes.io/projected/d9c49bf4-915c-4efb-81ed-b8c7a393d371-kube-api-access-2pjf7\") pod \"nova-scheduler-0\" (UID: \"d9c49bf4-915c-4efb-81ed-b8c7a393d371\") " pod="openstack/nova-scheduler-0" Sep 30 12:39:12 crc kubenswrapper[5002]: I0930 12:39:12.527203 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b186605c-aa7e-4131-8fa2-5e8d97789e8b-config-data\") pod \"nova-metadata-0\" (UID: 
\"b186605c-aa7e-4131-8fa2-5e8d97789e8b\") " pod="openstack/nova-metadata-0" Sep 30 12:39:12 crc kubenswrapper[5002]: I0930 12:39:12.529045 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pz7ss\" (UniqueName: \"kubernetes.io/projected/b186605c-aa7e-4131-8fa2-5e8d97789e8b-kube-api-access-pz7ss\") pod \"nova-metadata-0\" (UID: \"b186605c-aa7e-4131-8fa2-5e8d97789e8b\") " pod="openstack/nova-metadata-0" Sep 30 12:39:12 crc kubenswrapper[5002]: I0930 12:39:12.530429 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Sep 30 12:39:12 crc kubenswrapper[5002]: I0930 12:39:12.606267 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9aafee6a-2a42-4a59-abf5-9ac7cdd70aca-config\") pod \"dnsmasq-dns-845d6d6f59-n66v8\" (UID: \"9aafee6a-2a42-4a59-abf5-9ac7cdd70aca\") " pod="openstack/dnsmasq-dns-845d6d6f59-n66v8" Sep 30 12:39:12 crc kubenswrapper[5002]: I0930 12:39:12.607070 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/9aafee6a-2a42-4a59-abf5-9ac7cdd70aca-dns-swift-storage-0\") pod \"dnsmasq-dns-845d6d6f59-n66v8\" (UID: \"9aafee6a-2a42-4a59-abf5-9ac7cdd70aca\") " pod="openstack/dnsmasq-dns-845d6d6f59-n66v8" Sep 30 12:39:12 crc kubenswrapper[5002]: I0930 12:39:12.607130 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6k7rn\" (UniqueName: \"kubernetes.io/projected/9aafee6a-2a42-4a59-abf5-9ac7cdd70aca-kube-api-access-6k7rn\") pod \"dnsmasq-dns-845d6d6f59-n66v8\" (UID: \"9aafee6a-2a42-4a59-abf5-9ac7cdd70aca\") " pod="openstack/dnsmasq-dns-845d6d6f59-n66v8" Sep 30 12:39:12 crc kubenswrapper[5002]: I0930 12:39:12.607193 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9aafee6a-2a42-4a59-abf5-9ac7cdd70aca-ovsdbserver-nb\") pod \"dnsmasq-dns-845d6d6f59-n66v8\" (UID: \"9aafee6a-2a42-4a59-abf5-9ac7cdd70aca\") " pod="openstack/dnsmasq-dns-845d6d6f59-n66v8" Sep 30 12:39:12 crc kubenswrapper[5002]: I0930 12:39:12.607508 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9aafee6a-2a42-4a59-abf5-9ac7cdd70aca-ovsdbserver-sb\") pod \"dnsmasq-dns-845d6d6f59-n66v8\" (UID: \"9aafee6a-2a42-4a59-abf5-9ac7cdd70aca\") " pod="openstack/dnsmasq-dns-845d6d6f59-n66v8" Sep 30 12:39:12 crc kubenswrapper[5002]: I0930 12:39:12.607635 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9aafee6a-2a42-4a59-abf5-9ac7cdd70aca-dns-svc\") pod \"dnsmasq-dns-845d6d6f59-n66v8\" (UID: \"9aafee6a-2a42-4a59-abf5-9ac7cdd70aca\") " pod="openstack/dnsmasq-dns-845d6d6f59-n66v8" Sep 30 12:39:12 crc kubenswrapper[5002]: I0930 12:39:12.607851 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9aafee6a-2a42-4a59-abf5-9ac7cdd70aca-config\") pod \"dnsmasq-dns-845d6d6f59-n66v8\" (UID: \"9aafee6a-2a42-4a59-abf5-9ac7cdd70aca\") " pod="openstack/dnsmasq-dns-845d6d6f59-n66v8" Sep 30 12:39:12 crc kubenswrapper[5002]: I0930 12:39:12.608654 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: 
\"kubernetes.io/configmap/9aafee6a-2a42-4a59-abf5-9ac7cdd70aca-dns-swift-storage-0\") pod \"dnsmasq-dns-845d6d6f59-n66v8\" (UID: \"9aafee6a-2a42-4a59-abf5-9ac7cdd70aca\") " pod="openstack/dnsmasq-dns-845d6d6f59-n66v8" Sep 30 12:39:12 crc kubenswrapper[5002]: I0930 12:39:12.609028 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9aafee6a-2a42-4a59-abf5-9ac7cdd70aca-dns-svc\") pod \"dnsmasq-dns-845d6d6f59-n66v8\" (UID: \"9aafee6a-2a42-4a59-abf5-9ac7cdd70aca\") " pod="openstack/dnsmasq-dns-845d6d6f59-n66v8" Sep 30 12:39:12 crc kubenswrapper[5002]: I0930 12:39:12.610240 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9aafee6a-2a42-4a59-abf5-9ac7cdd70aca-ovsdbserver-sb\") pod \"dnsmasq-dns-845d6d6f59-n66v8\" (UID: \"9aafee6a-2a42-4a59-abf5-9ac7cdd70aca\") " pod="openstack/dnsmasq-dns-845d6d6f59-n66v8" Sep 30 12:39:12 crc kubenswrapper[5002]: I0930 12:39:12.613263 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9aafee6a-2a42-4a59-abf5-9ac7cdd70aca-ovsdbserver-nb\") pod \"dnsmasq-dns-845d6d6f59-n66v8\" (UID: \"9aafee6a-2a42-4a59-abf5-9ac7cdd70aca\") " pod="openstack/dnsmasq-dns-845d6d6f59-n66v8" Sep 30 12:39:12 crc kubenswrapper[5002]: I0930 12:39:12.637440 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6k7rn\" (UniqueName: \"kubernetes.io/projected/9aafee6a-2a42-4a59-abf5-9ac7cdd70aca-kube-api-access-6k7rn\") pod \"dnsmasq-dns-845d6d6f59-n66v8\" (UID: \"9aafee6a-2a42-4a59-abf5-9ac7cdd70aca\") " pod="openstack/dnsmasq-dns-845d6d6f59-n66v8" Sep 30 12:39:12 crc kubenswrapper[5002]: I0930 12:39:12.740085 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-8kcwc"] Sep 30 12:39:12 crc kubenswrapper[5002]: W0930 12:39:12.751600 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podcd4d414b_cd09_4feb_913f_174f17996cd1.slice/crio-8ab0b99967b51121d912446d9452b8329fe1d36db8124f5f9b28cf6249eb5ea8 WatchSource:0}: Error finding container 8ab0b99967b51121d912446d9452b8329fe1d36db8124f5f9b28cf6249eb5ea8: Status 404 returned error can't find the container with id 8ab0b99967b51121d912446d9452b8329fe1d36db8124f5f9b28cf6249eb5ea8 Sep 30 12:39:12 crc kubenswrapper[5002]: I0930 12:39:12.799039 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Sep 30 12:39:12 crc kubenswrapper[5002]: I0930 12:39:12.839887 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-845d6d6f59-n66v8" Sep 30 12:39:13 crc kubenswrapper[5002]: I0930 12:39:13.012673 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-8kcwc" event={"ID":"cd4d414b-cd09-4feb-913f-174f17996cd1","Type":"ContainerStarted","Data":"8ab0b99967b51121d912446d9452b8329fe1d36db8124f5f9b28cf6249eb5ea8"} Sep 30 12:39:13 crc kubenswrapper[5002]: I0930 12:39:13.123150 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Sep 30 12:39:13 crc kubenswrapper[5002]: I0930 12:39:13.155014 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Sep 30 12:39:13 crc kubenswrapper[5002]: I0930 12:39:13.206527 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Sep 30 12:39:13 crc kubenswrapper[5002]: I0930 12:39:13.235974 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-db-sync-6qsgx"] Sep 30 12:39:13 crc kubenswrapper[5002]: I0930 12:39:13.237144 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-6qsgx" Sep 30 12:39:13 crc kubenswrapper[5002]: I0930 12:39:13.240772 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-scripts" Sep 30 12:39:13 crc kubenswrapper[5002]: I0930 12:39:13.242314 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Sep 30 12:39:13 crc kubenswrapper[5002]: I0930 12:39:13.249189 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-6qsgx"] Sep 30 12:39:13 crc kubenswrapper[5002]: I0930 12:39:13.354547 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c4e8b9fc-5aff-4584-974f-ad060b75f0c3-config-data\") pod \"nova-cell1-conductor-db-sync-6qsgx\" (UID: \"c4e8b9fc-5aff-4584-974f-ad060b75f0c3\") " pod="openstack/nova-cell1-conductor-db-sync-6qsgx" Sep 30 12:39:13 crc kubenswrapper[5002]: I0930 12:39:13.354638 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c4e8b9fc-5aff-4584-974f-ad060b75f0c3-scripts\") pod \"nova-cell1-conductor-db-sync-6qsgx\" (UID: \"c4e8b9fc-5aff-4584-974f-ad060b75f0c3\") " pod="openstack/nova-cell1-conductor-db-sync-6qsgx" Sep 30 12:39:13 crc kubenswrapper[5002]: I0930 12:39:13.354667 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tpzgr\" (UniqueName: \"kubernetes.io/projected/c4e8b9fc-5aff-4584-974f-ad060b75f0c3-kube-api-access-tpzgr\") pod \"nova-cell1-conductor-db-sync-6qsgx\" (UID: \"c4e8b9fc-5aff-4584-974f-ad060b75f0c3\") " pod="openstack/nova-cell1-conductor-db-sync-6qsgx" Sep 30 12:39:13 crc kubenswrapper[5002]: I0930 12:39:13.354706 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c4e8b9fc-5aff-4584-974f-ad060b75f0c3-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-6qsgx\" (UID: \"c4e8b9fc-5aff-4584-974f-ad060b75f0c3\") " pod="openstack/nova-cell1-conductor-db-sync-6qsgx" Sep 30 12:39:13 crc kubenswrapper[5002]: I0930 12:39:13.450116 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Sep 30 
12:39:13 crc kubenswrapper[5002]: I0930 12:39:13.456202 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c4e8b9fc-5aff-4584-974f-ad060b75f0c3-config-data\") pod \"nova-cell1-conductor-db-sync-6qsgx\" (UID: \"c4e8b9fc-5aff-4584-974f-ad060b75f0c3\") " pod="openstack/nova-cell1-conductor-db-sync-6qsgx" Sep 30 12:39:13 crc kubenswrapper[5002]: I0930 12:39:13.456284 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c4e8b9fc-5aff-4584-974f-ad060b75f0c3-scripts\") pod \"nova-cell1-conductor-db-sync-6qsgx\" (UID: \"c4e8b9fc-5aff-4584-974f-ad060b75f0c3\") " pod="openstack/nova-cell1-conductor-db-sync-6qsgx" Sep 30 12:39:13 crc kubenswrapper[5002]: I0930 12:39:13.456303 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tpzgr\" (UniqueName: \"kubernetes.io/projected/c4e8b9fc-5aff-4584-974f-ad060b75f0c3-kube-api-access-tpzgr\") pod \"nova-cell1-conductor-db-sync-6qsgx\" (UID: \"c4e8b9fc-5aff-4584-974f-ad060b75f0c3\") " pod="openstack/nova-cell1-conductor-db-sync-6qsgx" Sep 30 12:39:13 crc kubenswrapper[5002]: I0930 12:39:13.456338 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c4e8b9fc-5aff-4584-974f-ad060b75f0c3-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-6qsgx\" (UID: \"c4e8b9fc-5aff-4584-974f-ad060b75f0c3\") " pod="openstack/nova-cell1-conductor-db-sync-6qsgx" Sep 30 12:39:13 crc kubenswrapper[5002]: I0930 12:39:13.461930 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c4e8b9fc-5aff-4584-974f-ad060b75f0c3-scripts\") pod \"nova-cell1-conductor-db-sync-6qsgx\" (UID: \"c4e8b9fc-5aff-4584-974f-ad060b75f0c3\") " pod="openstack/nova-cell1-conductor-db-sync-6qsgx" Sep 30 12:39:13 crc kubenswrapper[5002]: I0930 12:39:13.462682 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c4e8b9fc-5aff-4584-974f-ad060b75f0c3-config-data\") pod \"nova-cell1-conductor-db-sync-6qsgx\" (UID: \"c4e8b9fc-5aff-4584-974f-ad060b75f0c3\") " pod="openstack/nova-cell1-conductor-db-sync-6qsgx" Sep 30 12:39:13 crc kubenswrapper[5002]: I0930 12:39:13.465172 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c4e8b9fc-5aff-4584-974f-ad060b75f0c3-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-6qsgx\" (UID: \"c4e8b9fc-5aff-4584-974f-ad060b75f0c3\") " pod="openstack/nova-cell1-conductor-db-sync-6qsgx" Sep 30 12:39:13 crc kubenswrapper[5002]: I0930 12:39:13.479608 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tpzgr\" (UniqueName: \"kubernetes.io/projected/c4e8b9fc-5aff-4584-974f-ad060b75f0c3-kube-api-access-tpzgr\") pod \"nova-cell1-conductor-db-sync-6qsgx\" (UID: \"c4e8b9fc-5aff-4584-974f-ad060b75f0c3\") " pod="openstack/nova-cell1-conductor-db-sync-6qsgx" Sep 30 12:39:13 crc kubenswrapper[5002]: I0930 12:39:13.603646 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-845d6d6f59-n66v8"] Sep 30 12:39:13 crc kubenswrapper[5002]: W0930 12:39:13.604132 5002 manager.go:1169] Failed to process watch event {EventType:0 
Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9aafee6a_2a42_4a59_abf5_9ac7cdd70aca.slice/crio-baf56aeed60266b14e01708d07e222f7ee859fd743f8b73fa267c542cbfb0d23 WatchSource:0}: Error finding container baf56aeed60266b14e01708d07e222f7ee859fd743f8b73fa267c542cbfb0d23: Status 404 returned error can't find the container with id baf56aeed60266b14e01708d07e222f7ee859fd743f8b73fa267c542cbfb0d23 Sep 30 12:39:13 crc kubenswrapper[5002]: I0930 12:39:13.624328 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-6qsgx" Sep 30 12:39:14 crc kubenswrapper[5002]: I0930 12:39:14.026713 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"7d675115-dfa8-46a9-8fdf-b9c5e6ef2d09","Type":"ContainerStarted","Data":"1c394baac9c21398515b7c0a8edab83399bb4fc45c3a318dee217ca73d29da35"} Sep 30 12:39:14 crc kubenswrapper[5002]: I0930 12:39:14.030242 5002 generic.go:334] "Generic (PLEG): container finished" podID="9aafee6a-2a42-4a59-abf5-9ac7cdd70aca" containerID="9ed648102b4434d916a0bf898ff00dd41841530a9b308463a228c82210ebcc2d" exitCode=0 Sep 30 12:39:14 crc kubenswrapper[5002]: I0930 12:39:14.030335 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-845d6d6f59-n66v8" event={"ID":"9aafee6a-2a42-4a59-abf5-9ac7cdd70aca","Type":"ContainerDied","Data":"9ed648102b4434d916a0bf898ff00dd41841530a9b308463a228c82210ebcc2d"} Sep 30 12:39:14 crc kubenswrapper[5002]: I0930 12:39:14.030362 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-845d6d6f59-n66v8" event={"ID":"9aafee6a-2a42-4a59-abf5-9ac7cdd70aca","Type":"ContainerStarted","Data":"baf56aeed60266b14e01708d07e222f7ee859fd743f8b73fa267c542cbfb0d23"} Sep 30 12:39:14 crc kubenswrapper[5002]: I0930 12:39:14.035043 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"d9c49bf4-915c-4efb-81ed-b8c7a393d371","Type":"ContainerStarted","Data":"697d739af157050a5f833e1d128d8eb7393cfdb66cb46ee01bbe4575d3e11c6f"} Sep 30 12:39:14 crc kubenswrapper[5002]: I0930 12:39:14.038836 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-8kcwc" event={"ID":"cd4d414b-cd09-4feb-913f-174f17996cd1","Type":"ContainerStarted","Data":"1fb42e09be404019f6f83adc5f150f7da4c40df99782a5d5437b642079a7a848"} Sep 30 12:39:14 crc kubenswrapper[5002]: I0930 12:39:14.045770 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"b186605c-aa7e-4131-8fa2-5e8d97789e8b","Type":"ContainerStarted","Data":"aec2316fc906a03c25b3727769a804e55407c3a8056b2145424b7f070805a4d6"} Sep 30 12:39:14 crc kubenswrapper[5002]: I0930 12:39:14.047404 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"f6bdc68b-5ea5-468d-bd40-e0c04652a581","Type":"ContainerStarted","Data":"f7410c791cf59d6a0cb4baefb59141e90799fc06abd4aec490d6774bd152fade"} Sep 30 12:39:14 crc kubenswrapper[5002]: I0930 12:39:14.084893 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-6qsgx"] Sep 30 12:39:14 crc kubenswrapper[5002]: I0930 12:39:14.090941 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-cell-mapping-8kcwc" podStartSLOduration=3.090920491 podStartE2EDuration="3.090920491s" podCreationTimestamp="2025-09-30 12:39:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" 
lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 12:39:14.072506772 +0000 UTC m=+1128.322188928" watchObservedRunningTime="2025-09-30 12:39:14.090920491 +0000 UTC m=+1128.340602637" Sep 30 12:39:15 crc kubenswrapper[5002]: I0930 12:39:15.061431 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-6qsgx" event={"ID":"c4e8b9fc-5aff-4584-974f-ad060b75f0c3","Type":"ContainerStarted","Data":"0b2e9bbf92e51a3d7e2593be1b5ed29afadceb9b66b7a114d28b4ba438b71372"} Sep 30 12:39:15 crc kubenswrapper[5002]: I0930 12:39:15.061791 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-6qsgx" event={"ID":"c4e8b9fc-5aff-4584-974f-ad060b75f0c3","Type":"ContainerStarted","Data":"4473eed15ed841d4fbb3d9de3dd9cd5789c6228945323b5545cbea6299e5a04c"} Sep 30 12:39:15 crc kubenswrapper[5002]: I0930 12:39:15.064828 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-845d6d6f59-n66v8" event={"ID":"9aafee6a-2a42-4a59-abf5-9ac7cdd70aca","Type":"ContainerStarted","Data":"bbf3996d83b99e8eff6ef68b22b497edb43c2058697c2b76f69ce6ee22cb392d"} Sep 30 12:39:15 crc kubenswrapper[5002]: I0930 12:39:15.118932 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-845d6d6f59-n66v8" podStartSLOduration=3.118905891 podStartE2EDuration="3.118905891s" podCreationTimestamp="2025-09-30 12:39:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 12:39:15.108914305 +0000 UTC m=+1129.358596481" watchObservedRunningTime="2025-09-30 12:39:15.118905891 +0000 UTC m=+1129.368588037" Sep 30 12:39:15 crc kubenswrapper[5002]: I0930 12:39:15.124149 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-db-sync-6qsgx" podStartSLOduration=2.124130455 podStartE2EDuration="2.124130455s" podCreationTimestamp="2025-09-30 12:39:13 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 12:39:15.084872571 +0000 UTC m=+1129.334554717" watchObservedRunningTime="2025-09-30 12:39:15.124130455 +0000 UTC m=+1129.373812601" Sep 30 12:39:16 crc kubenswrapper[5002]: I0930 12:39:16.014080 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Sep 30 12:39:16 crc kubenswrapper[5002]: I0930 12:39:16.020122 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Sep 30 12:39:16 crc kubenswrapper[5002]: I0930 12:39:16.073044 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-845d6d6f59-n66v8" Sep 30 12:39:17 crc kubenswrapper[5002]: I0930 12:39:17.086880 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"f6bdc68b-5ea5-468d-bd40-e0c04652a581","Type":"ContainerStarted","Data":"34d2fc4ed30de1144e68bb88be6d1aaed0105f0d96ec1bbf05530afd14bc4427"} Sep 30 12:39:17 crc kubenswrapper[5002]: I0930 12:39:17.087221 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"f6bdc68b-5ea5-468d-bd40-e0c04652a581","Type":"ContainerStarted","Data":"7b8086bab6889bdc08ab3652a321373821a4c214087e8c8d36de3105d5b111ba"} Sep 30 12:39:17 crc kubenswrapper[5002]: I0930 12:39:17.088646 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/nova-cell1-novncproxy-0" event={"ID":"7d675115-dfa8-46a9-8fdf-b9c5e6ef2d09","Type":"ContainerStarted","Data":"66188d42a7f481568dd66d6eca5ad90d8ee5cc340f9ccfcff45d059c45326067"} Sep 30 12:39:17 crc kubenswrapper[5002]: I0930 12:39:17.088844 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell1-novncproxy-0" podUID="7d675115-dfa8-46a9-8fdf-b9c5e6ef2d09" containerName="nova-cell1-novncproxy-novncproxy" containerID="cri-o://66188d42a7f481568dd66d6eca5ad90d8ee5cc340f9ccfcff45d059c45326067" gracePeriod=30 Sep 30 12:39:17 crc kubenswrapper[5002]: I0930 12:39:17.090963 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"d9c49bf4-915c-4efb-81ed-b8c7a393d371","Type":"ContainerStarted","Data":"5590adf8564ec87f88f4ab86615dc85efc1aefc270ae86356444dff4296da921"} Sep 30 12:39:17 crc kubenswrapper[5002]: I0930 12:39:17.109102 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="b186605c-aa7e-4131-8fa2-5e8d97789e8b" containerName="nova-metadata-log" containerID="cri-o://58e8b198ffc552738bab9d546d08423cbec6414761a39c8f8139ecf324bc4de6" gracePeriod=30 Sep 30 12:39:17 crc kubenswrapper[5002]: I0930 12:39:17.109450 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"b186605c-aa7e-4131-8fa2-5e8d97789e8b","Type":"ContainerStarted","Data":"58bbd10865923b3fc67670c08f55a61ede8e57660ab3137ce388b1dac102b308"} Sep 30 12:39:17 crc kubenswrapper[5002]: I0930 12:39:17.109502 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"b186605c-aa7e-4131-8fa2-5e8d97789e8b","Type":"ContainerStarted","Data":"58e8b198ffc552738bab9d546d08423cbec6414761a39c8f8139ecf324bc4de6"} Sep 30 12:39:17 crc kubenswrapper[5002]: I0930 12:39:17.109559 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="b186605c-aa7e-4131-8fa2-5e8d97789e8b" containerName="nova-metadata-metadata" containerID="cri-o://58bbd10865923b3fc67670c08f55a61ede8e57660ab3137ce388b1dac102b308" gracePeriod=30 Sep 30 12:39:17 crc kubenswrapper[5002]: I0930 12:39:17.131073 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.041804264 podStartE2EDuration="5.131051391s" podCreationTimestamp="2025-09-30 12:39:12 +0000 UTC" firstStartedPulling="2025-09-30 12:39:13.164059082 +0000 UTC m=+1127.413741218" lastFinishedPulling="2025-09-30 12:39:16.253306199 +0000 UTC m=+1130.502988345" observedRunningTime="2025-09-30 12:39:17.115342487 +0000 UTC m=+1131.365024643" watchObservedRunningTime="2025-09-30 12:39:17.131051391 +0000 UTC m=+1131.380733537" Sep 30 12:39:17 crc kubenswrapper[5002]: I0930 12:39:17.163205 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=2.063505143 podStartE2EDuration="5.163187348s" podCreationTimestamp="2025-09-30 12:39:12 +0000 UTC" firstStartedPulling="2025-09-30 12:39:13.145720026 +0000 UTC m=+1127.395402172" lastFinishedPulling="2025-09-30 12:39:16.245402231 +0000 UTC m=+1130.495084377" observedRunningTime="2025-09-30 12:39:17.159483926 +0000 UTC m=+1131.409166072" watchObservedRunningTime="2025-09-30 12:39:17.163187348 +0000 UTC m=+1131.412869494" Sep 30 12:39:17 crc kubenswrapper[5002]: I0930 12:39:17.174603 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openstack/nova-metadata-0" podStartSLOduration=2.37993414 podStartE2EDuration="5.174584034s" podCreationTimestamp="2025-09-30 12:39:12 +0000 UTC" firstStartedPulling="2025-09-30 12:39:13.449270526 +0000 UTC m=+1127.698952672" lastFinishedPulling="2025-09-30 12:39:16.24392042 +0000 UTC m=+1130.493602566" observedRunningTime="2025-09-30 12:39:17.14331996 +0000 UTC m=+1131.393002106" watchObservedRunningTime="2025-09-30 12:39:17.174584034 +0000 UTC m=+1131.424266180" Sep 30 12:39:17 crc kubenswrapper[5002]: I0930 12:39:17.181927 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.163282369 podStartE2EDuration="5.181910216s" podCreationTimestamp="2025-09-30 12:39:12 +0000 UTC" firstStartedPulling="2025-09-30 12:39:13.225077177 +0000 UTC m=+1127.474759323" lastFinishedPulling="2025-09-30 12:39:16.243705024 +0000 UTC m=+1130.493387170" observedRunningTime="2025-09-30 12:39:17.178700176 +0000 UTC m=+1131.428382322" watchObservedRunningTime="2025-09-30 12:39:17.181910216 +0000 UTC m=+1131.431592362" Sep 30 12:39:17 crc kubenswrapper[5002]: I0930 12:39:17.484317 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0" Sep 30 12:39:17 crc kubenswrapper[5002]: I0930 12:39:17.531116 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Sep 30 12:39:17 crc kubenswrapper[5002]: I0930 12:39:17.800578 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Sep 30 12:39:17 crc kubenswrapper[5002]: I0930 12:39:17.800954 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Sep 30 12:39:17 crc kubenswrapper[5002]: I0930 12:39:17.969235 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Sep 30 12:39:18 crc kubenswrapper[5002]: I0930 12:39:18.049063 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b186605c-aa7e-4131-8fa2-5e8d97789e8b-logs\") pod \"b186605c-aa7e-4131-8fa2-5e8d97789e8b\" (UID: \"b186605c-aa7e-4131-8fa2-5e8d97789e8b\") " Sep 30 12:39:18 crc kubenswrapper[5002]: I0930 12:39:18.049167 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b186605c-aa7e-4131-8fa2-5e8d97789e8b-config-data\") pod \"b186605c-aa7e-4131-8fa2-5e8d97789e8b\" (UID: \"b186605c-aa7e-4131-8fa2-5e8d97789e8b\") " Sep 30 12:39:18 crc kubenswrapper[5002]: I0930 12:39:18.049316 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b186605c-aa7e-4131-8fa2-5e8d97789e8b-combined-ca-bundle\") pod \"b186605c-aa7e-4131-8fa2-5e8d97789e8b\" (UID: \"b186605c-aa7e-4131-8fa2-5e8d97789e8b\") " Sep 30 12:39:18 crc kubenswrapper[5002]: I0930 12:39:18.049381 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pz7ss\" (UniqueName: \"kubernetes.io/projected/b186605c-aa7e-4131-8fa2-5e8d97789e8b-kube-api-access-pz7ss\") pod \"b186605c-aa7e-4131-8fa2-5e8d97789e8b\" (UID: \"b186605c-aa7e-4131-8fa2-5e8d97789e8b\") " Sep 30 12:39:18 crc kubenswrapper[5002]: I0930 12:39:18.050081 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b186605c-aa7e-4131-8fa2-5e8d97789e8b-logs" (OuterVolumeSpecName: "logs") pod "b186605c-aa7e-4131-8fa2-5e8d97789e8b" (UID: "b186605c-aa7e-4131-8fa2-5e8d97789e8b"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 12:39:18 crc kubenswrapper[5002]: I0930 12:39:18.073863 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b186605c-aa7e-4131-8fa2-5e8d97789e8b-kube-api-access-pz7ss" (OuterVolumeSpecName: "kube-api-access-pz7ss") pod "b186605c-aa7e-4131-8fa2-5e8d97789e8b" (UID: "b186605c-aa7e-4131-8fa2-5e8d97789e8b"). InnerVolumeSpecName "kube-api-access-pz7ss". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 12:39:18 crc kubenswrapper[5002]: I0930 12:39:18.084167 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b186605c-aa7e-4131-8fa2-5e8d97789e8b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b186605c-aa7e-4131-8fa2-5e8d97789e8b" (UID: "b186605c-aa7e-4131-8fa2-5e8d97789e8b"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:39:18 crc kubenswrapper[5002]: I0930 12:39:18.088728 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b186605c-aa7e-4131-8fa2-5e8d97789e8b-config-data" (OuterVolumeSpecName: "config-data") pod "b186605c-aa7e-4131-8fa2-5e8d97789e8b" (UID: "b186605c-aa7e-4131-8fa2-5e8d97789e8b"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:39:18 crc kubenswrapper[5002]: I0930 12:39:18.118772 5002 generic.go:334] "Generic (PLEG): container finished" podID="b186605c-aa7e-4131-8fa2-5e8d97789e8b" containerID="58bbd10865923b3fc67670c08f55a61ede8e57660ab3137ce388b1dac102b308" exitCode=0 Sep 30 12:39:18 crc kubenswrapper[5002]: I0930 12:39:18.118810 5002 generic.go:334] "Generic (PLEG): container finished" podID="b186605c-aa7e-4131-8fa2-5e8d97789e8b" containerID="58e8b198ffc552738bab9d546d08423cbec6414761a39c8f8139ecf324bc4de6" exitCode=143 Sep 30 12:39:18 crc kubenswrapper[5002]: I0930 12:39:18.120375 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Sep 30 12:39:18 crc kubenswrapper[5002]: I0930 12:39:18.121603 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"b186605c-aa7e-4131-8fa2-5e8d97789e8b","Type":"ContainerDied","Data":"58bbd10865923b3fc67670c08f55a61ede8e57660ab3137ce388b1dac102b308"} Sep 30 12:39:18 crc kubenswrapper[5002]: I0930 12:39:18.121652 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"b186605c-aa7e-4131-8fa2-5e8d97789e8b","Type":"ContainerDied","Data":"58e8b198ffc552738bab9d546d08423cbec6414761a39c8f8139ecf324bc4de6"} Sep 30 12:39:18 crc kubenswrapper[5002]: I0930 12:39:18.121664 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"b186605c-aa7e-4131-8fa2-5e8d97789e8b","Type":"ContainerDied","Data":"aec2316fc906a03c25b3727769a804e55407c3a8056b2145424b7f070805a4d6"} Sep 30 12:39:18 crc kubenswrapper[5002]: I0930 12:39:18.121680 5002 scope.go:117] "RemoveContainer" containerID="58bbd10865923b3fc67670c08f55a61ede8e57660ab3137ce388b1dac102b308" Sep 30 12:39:18 crc kubenswrapper[5002]: I0930 12:39:18.152659 5002 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b186605c-aa7e-4131-8fa2-5e8d97789e8b-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 12:39:18 crc kubenswrapper[5002]: I0930 12:39:18.152702 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pz7ss\" (UniqueName: \"kubernetes.io/projected/b186605c-aa7e-4131-8fa2-5e8d97789e8b-kube-api-access-pz7ss\") on node \"crc\" DevicePath \"\"" Sep 30 12:39:18 crc kubenswrapper[5002]: I0930 12:39:18.152714 5002 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b186605c-aa7e-4131-8fa2-5e8d97789e8b-logs\") on node \"crc\" DevicePath \"\"" Sep 30 12:39:18 crc kubenswrapper[5002]: I0930 12:39:18.152731 5002 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b186605c-aa7e-4131-8fa2-5e8d97789e8b-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 12:39:18 crc kubenswrapper[5002]: I0930 12:39:18.152887 5002 scope.go:117] "RemoveContainer" containerID="58e8b198ffc552738bab9d546d08423cbec6414761a39c8f8139ecf324bc4de6" Sep 30 12:39:18 crc kubenswrapper[5002]: I0930 12:39:18.206388 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Sep 30 12:39:18 crc kubenswrapper[5002]: I0930 12:39:18.213872 5002 scope.go:117] "RemoveContainer" containerID="58bbd10865923b3fc67670c08f55a61ede8e57660ab3137ce388b1dac102b308" Sep 30 12:39:18 crc kubenswrapper[5002]: E0930 12:39:18.214291 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = 
could not find container \"58bbd10865923b3fc67670c08f55a61ede8e57660ab3137ce388b1dac102b308\": container with ID starting with 58bbd10865923b3fc67670c08f55a61ede8e57660ab3137ce388b1dac102b308 not found: ID does not exist" containerID="58bbd10865923b3fc67670c08f55a61ede8e57660ab3137ce388b1dac102b308" Sep 30 12:39:18 crc kubenswrapper[5002]: I0930 12:39:18.214347 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"58bbd10865923b3fc67670c08f55a61ede8e57660ab3137ce388b1dac102b308"} err="failed to get container status \"58bbd10865923b3fc67670c08f55a61ede8e57660ab3137ce388b1dac102b308\": rpc error: code = NotFound desc = could not find container \"58bbd10865923b3fc67670c08f55a61ede8e57660ab3137ce388b1dac102b308\": container with ID starting with 58bbd10865923b3fc67670c08f55a61ede8e57660ab3137ce388b1dac102b308 not found: ID does not exist" Sep 30 12:39:18 crc kubenswrapper[5002]: I0930 12:39:18.214367 5002 scope.go:117] "RemoveContainer" containerID="58e8b198ffc552738bab9d546d08423cbec6414761a39c8f8139ecf324bc4de6" Sep 30 12:39:18 crc kubenswrapper[5002]: E0930 12:39:18.214611 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"58e8b198ffc552738bab9d546d08423cbec6414761a39c8f8139ecf324bc4de6\": container with ID starting with 58e8b198ffc552738bab9d546d08423cbec6414761a39c8f8139ecf324bc4de6 not found: ID does not exist" containerID="58e8b198ffc552738bab9d546d08423cbec6414761a39c8f8139ecf324bc4de6" Sep 30 12:39:18 crc kubenswrapper[5002]: I0930 12:39:18.214627 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"58e8b198ffc552738bab9d546d08423cbec6414761a39c8f8139ecf324bc4de6"} err="failed to get container status \"58e8b198ffc552738bab9d546d08423cbec6414761a39c8f8139ecf324bc4de6\": rpc error: code = NotFound desc = could not find container \"58e8b198ffc552738bab9d546d08423cbec6414761a39c8f8139ecf324bc4de6\": container with ID starting with 58e8b198ffc552738bab9d546d08423cbec6414761a39c8f8139ecf324bc4de6 not found: ID does not exist" Sep 30 12:39:18 crc kubenswrapper[5002]: I0930 12:39:18.214639 5002 scope.go:117] "RemoveContainer" containerID="58bbd10865923b3fc67670c08f55a61ede8e57660ab3137ce388b1dac102b308" Sep 30 12:39:18 crc kubenswrapper[5002]: I0930 12:39:18.222943 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"58bbd10865923b3fc67670c08f55a61ede8e57660ab3137ce388b1dac102b308"} err="failed to get container status \"58bbd10865923b3fc67670c08f55a61ede8e57660ab3137ce388b1dac102b308\": rpc error: code = NotFound desc = could not find container \"58bbd10865923b3fc67670c08f55a61ede8e57660ab3137ce388b1dac102b308\": container with ID starting with 58bbd10865923b3fc67670c08f55a61ede8e57660ab3137ce388b1dac102b308 not found: ID does not exist" Sep 30 12:39:18 crc kubenswrapper[5002]: I0930 12:39:18.222981 5002 scope.go:117] "RemoveContainer" containerID="58e8b198ffc552738bab9d546d08423cbec6414761a39c8f8139ecf324bc4de6" Sep 30 12:39:18 crc kubenswrapper[5002]: I0930 12:39:18.223082 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Sep 30 12:39:18 crc kubenswrapper[5002]: I0930 12:39:18.223966 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"58e8b198ffc552738bab9d546d08423cbec6414761a39c8f8139ecf324bc4de6"} err="failed to get container status 
\"58e8b198ffc552738bab9d546d08423cbec6414761a39c8f8139ecf324bc4de6\": rpc error: code = NotFound desc = could not find container \"58e8b198ffc552738bab9d546d08423cbec6414761a39c8f8139ecf324bc4de6\": container with ID starting with 58e8b198ffc552738bab9d546d08423cbec6414761a39c8f8139ecf324bc4de6 not found: ID does not exist" Sep 30 12:39:18 crc kubenswrapper[5002]: I0930 12:39:18.235289 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Sep 30 12:39:18 crc kubenswrapper[5002]: E0930 12:39:18.235788 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b186605c-aa7e-4131-8fa2-5e8d97789e8b" containerName="nova-metadata-log" Sep 30 12:39:18 crc kubenswrapper[5002]: I0930 12:39:18.235810 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="b186605c-aa7e-4131-8fa2-5e8d97789e8b" containerName="nova-metadata-log" Sep 30 12:39:18 crc kubenswrapper[5002]: E0930 12:39:18.235842 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b186605c-aa7e-4131-8fa2-5e8d97789e8b" containerName="nova-metadata-metadata" Sep 30 12:39:18 crc kubenswrapper[5002]: I0930 12:39:18.235848 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="b186605c-aa7e-4131-8fa2-5e8d97789e8b" containerName="nova-metadata-metadata" Sep 30 12:39:18 crc kubenswrapper[5002]: I0930 12:39:18.236011 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="b186605c-aa7e-4131-8fa2-5e8d97789e8b" containerName="nova-metadata-metadata" Sep 30 12:39:18 crc kubenswrapper[5002]: I0930 12:39:18.236026 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="b186605c-aa7e-4131-8fa2-5e8d97789e8b" containerName="nova-metadata-log" Sep 30 12:39:18 crc kubenswrapper[5002]: I0930 12:39:18.236972 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Sep 30 12:39:18 crc kubenswrapper[5002]: I0930 12:39:18.239546 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Sep 30 12:39:18 crc kubenswrapper[5002]: I0930 12:39:18.240985 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Sep 30 12:39:18 crc kubenswrapper[5002]: I0930 12:39:18.262983 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Sep 30 12:39:18 crc kubenswrapper[5002]: I0930 12:39:18.363645 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/eb4a14e7-0b7a-4157-9a98-678afa5e648f-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"eb4a14e7-0b7a-4157-9a98-678afa5e648f\") " pod="openstack/nova-metadata-0" Sep 30 12:39:18 crc kubenswrapper[5002]: I0930 12:39:18.363704 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/eb4a14e7-0b7a-4157-9a98-678afa5e648f-logs\") pod \"nova-metadata-0\" (UID: \"eb4a14e7-0b7a-4157-9a98-678afa5e648f\") " pod="openstack/nova-metadata-0" Sep 30 12:39:18 crc kubenswrapper[5002]: I0930 12:39:18.363770 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bv2x9\" (UniqueName: \"kubernetes.io/projected/eb4a14e7-0b7a-4157-9a98-678afa5e648f-kube-api-access-bv2x9\") pod \"nova-metadata-0\" (UID: \"eb4a14e7-0b7a-4157-9a98-678afa5e648f\") " pod="openstack/nova-metadata-0" Sep 30 12:39:18 crc kubenswrapper[5002]: I0930 12:39:18.364082 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eb4a14e7-0b7a-4157-9a98-678afa5e648f-config-data\") pod \"nova-metadata-0\" (UID: \"eb4a14e7-0b7a-4157-9a98-678afa5e648f\") " pod="openstack/nova-metadata-0" Sep 30 12:39:18 crc kubenswrapper[5002]: I0930 12:39:18.364153 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eb4a14e7-0b7a-4157-9a98-678afa5e648f-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"eb4a14e7-0b7a-4157-9a98-678afa5e648f\") " pod="openstack/nova-metadata-0" Sep 30 12:39:18 crc kubenswrapper[5002]: I0930 12:39:18.466094 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eb4a14e7-0b7a-4157-9a98-678afa5e648f-config-data\") pod \"nova-metadata-0\" (UID: \"eb4a14e7-0b7a-4157-9a98-678afa5e648f\") " pod="openstack/nova-metadata-0" Sep 30 12:39:18 crc kubenswrapper[5002]: I0930 12:39:18.466164 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eb4a14e7-0b7a-4157-9a98-678afa5e648f-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"eb4a14e7-0b7a-4157-9a98-678afa5e648f\") " pod="openstack/nova-metadata-0" Sep 30 12:39:18 crc kubenswrapper[5002]: I0930 12:39:18.466230 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/eb4a14e7-0b7a-4157-9a98-678afa5e648f-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"eb4a14e7-0b7a-4157-9a98-678afa5e648f\") " 
pod="openstack/nova-metadata-0" Sep 30 12:39:18 crc kubenswrapper[5002]: I0930 12:39:18.466290 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/eb4a14e7-0b7a-4157-9a98-678afa5e648f-logs\") pod \"nova-metadata-0\" (UID: \"eb4a14e7-0b7a-4157-9a98-678afa5e648f\") " pod="openstack/nova-metadata-0" Sep 30 12:39:18 crc kubenswrapper[5002]: I0930 12:39:18.466373 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bv2x9\" (UniqueName: \"kubernetes.io/projected/eb4a14e7-0b7a-4157-9a98-678afa5e648f-kube-api-access-bv2x9\") pod \"nova-metadata-0\" (UID: \"eb4a14e7-0b7a-4157-9a98-678afa5e648f\") " pod="openstack/nova-metadata-0" Sep 30 12:39:18 crc kubenswrapper[5002]: I0930 12:39:18.466685 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/eb4a14e7-0b7a-4157-9a98-678afa5e648f-logs\") pod \"nova-metadata-0\" (UID: \"eb4a14e7-0b7a-4157-9a98-678afa5e648f\") " pod="openstack/nova-metadata-0" Sep 30 12:39:18 crc kubenswrapper[5002]: I0930 12:39:18.469406 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/eb4a14e7-0b7a-4157-9a98-678afa5e648f-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"eb4a14e7-0b7a-4157-9a98-678afa5e648f\") " pod="openstack/nova-metadata-0" Sep 30 12:39:18 crc kubenswrapper[5002]: I0930 12:39:18.470274 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eb4a14e7-0b7a-4157-9a98-678afa5e648f-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"eb4a14e7-0b7a-4157-9a98-678afa5e648f\") " pod="openstack/nova-metadata-0" Sep 30 12:39:18 crc kubenswrapper[5002]: I0930 12:39:18.474673 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eb4a14e7-0b7a-4157-9a98-678afa5e648f-config-data\") pod \"nova-metadata-0\" (UID: \"eb4a14e7-0b7a-4157-9a98-678afa5e648f\") " pod="openstack/nova-metadata-0" Sep 30 12:39:18 crc kubenswrapper[5002]: I0930 12:39:18.487039 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bv2x9\" (UniqueName: \"kubernetes.io/projected/eb4a14e7-0b7a-4157-9a98-678afa5e648f-kube-api-access-bv2x9\") pod \"nova-metadata-0\" (UID: \"eb4a14e7-0b7a-4157-9a98-678afa5e648f\") " pod="openstack/nova-metadata-0" Sep 30 12:39:18 crc kubenswrapper[5002]: I0930 12:39:18.616051 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Sep 30 12:39:18 crc kubenswrapper[5002]: I0930 12:39:18.694410 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b186605c-aa7e-4131-8fa2-5e8d97789e8b" path="/var/lib/kubelet/pods/b186605c-aa7e-4131-8fa2-5e8d97789e8b/volumes" Sep 30 12:39:19 crc kubenswrapper[5002]: I0930 12:39:19.152767 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Sep 30 12:39:20 crc kubenswrapper[5002]: I0930 12:39:20.155867 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"eb4a14e7-0b7a-4157-9a98-678afa5e648f","Type":"ContainerStarted","Data":"481e8ee3670a25b9d235d5a11ddb97f336738396ae5988f10cb9eaa33583035b"} Sep 30 12:39:20 crc kubenswrapper[5002]: I0930 12:39:20.156164 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"eb4a14e7-0b7a-4157-9a98-678afa5e648f","Type":"ContainerStarted","Data":"803d39cd85d20fea5e2b7e535994c5ef77ab29ecfa07f69a6156e8ab901179cc"} Sep 30 12:39:20 crc kubenswrapper[5002]: I0930 12:39:20.156178 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"eb4a14e7-0b7a-4157-9a98-678afa5e648f","Type":"ContainerStarted","Data":"fffbeb2b0d3299f024d32c876e3f45d57a819a4f98a1426342101309497a61df"} Sep 30 12:39:20 crc kubenswrapper[5002]: I0930 12:39:20.185129 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.185101247 podStartE2EDuration="2.185101247s" podCreationTimestamp="2025-09-30 12:39:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 12:39:20.171547003 +0000 UTC m=+1134.421229179" watchObservedRunningTime="2025-09-30 12:39:20.185101247 +0000 UTC m=+1134.434783403" Sep 30 12:39:21 crc kubenswrapper[5002]: I0930 12:39:21.168014 5002 generic.go:334] "Generic (PLEG): container finished" podID="cd4d414b-cd09-4feb-913f-174f17996cd1" containerID="1fb42e09be404019f6f83adc5f150f7da4c40df99782a5d5437b642079a7a848" exitCode=0 Sep 30 12:39:21 crc kubenswrapper[5002]: I0930 12:39:21.168142 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-8kcwc" event={"ID":"cd4d414b-cd09-4feb-913f-174f17996cd1","Type":"ContainerDied","Data":"1fb42e09be404019f6f83adc5f150f7da4c40df99782a5d5437b642079a7a848"} Sep 30 12:39:22 crc kubenswrapper[5002]: I0930 12:39:22.444329 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Sep 30 12:39:22 crc kubenswrapper[5002]: I0930 12:39:22.444657 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Sep 30 12:39:22 crc kubenswrapper[5002]: I0930 12:39:22.491192 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-cell-mapping-8kcwc" Sep 30 12:39:22 crc kubenswrapper[5002]: I0930 12:39:22.531214 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Sep 30 12:39:22 crc kubenswrapper[5002]: I0930 12:39:22.540050 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bcs25\" (UniqueName: \"kubernetes.io/projected/cd4d414b-cd09-4feb-913f-174f17996cd1-kube-api-access-bcs25\") pod \"cd4d414b-cd09-4feb-913f-174f17996cd1\" (UID: \"cd4d414b-cd09-4feb-913f-174f17996cd1\") " Sep 30 12:39:22 crc kubenswrapper[5002]: I0930 12:39:22.540112 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cd4d414b-cd09-4feb-913f-174f17996cd1-scripts\") pod \"cd4d414b-cd09-4feb-913f-174f17996cd1\" (UID: \"cd4d414b-cd09-4feb-913f-174f17996cd1\") " Sep 30 12:39:22 crc kubenswrapper[5002]: I0930 12:39:22.540330 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cd4d414b-cd09-4feb-913f-174f17996cd1-combined-ca-bundle\") pod \"cd4d414b-cd09-4feb-913f-174f17996cd1\" (UID: \"cd4d414b-cd09-4feb-913f-174f17996cd1\") " Sep 30 12:39:22 crc kubenswrapper[5002]: I0930 12:39:22.540403 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cd4d414b-cd09-4feb-913f-174f17996cd1-config-data\") pod \"cd4d414b-cd09-4feb-913f-174f17996cd1\" (UID: \"cd4d414b-cd09-4feb-913f-174f17996cd1\") " Sep 30 12:39:22 crc kubenswrapper[5002]: I0930 12:39:22.548985 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cd4d414b-cd09-4feb-913f-174f17996cd1-scripts" (OuterVolumeSpecName: "scripts") pod "cd4d414b-cd09-4feb-913f-174f17996cd1" (UID: "cd4d414b-cd09-4feb-913f-174f17996cd1"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:39:22 crc kubenswrapper[5002]: I0930 12:39:22.563190 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Sep 30 12:39:22 crc kubenswrapper[5002]: I0930 12:39:22.564871 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cd4d414b-cd09-4feb-913f-174f17996cd1-kube-api-access-bcs25" (OuterVolumeSpecName: "kube-api-access-bcs25") pod "cd4d414b-cd09-4feb-913f-174f17996cd1" (UID: "cd4d414b-cd09-4feb-913f-174f17996cd1"). InnerVolumeSpecName "kube-api-access-bcs25". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 12:39:22 crc kubenswrapper[5002]: I0930 12:39:22.575021 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cd4d414b-cd09-4feb-913f-174f17996cd1-config-data" (OuterVolumeSpecName: "config-data") pod "cd4d414b-cd09-4feb-913f-174f17996cd1" (UID: "cd4d414b-cd09-4feb-913f-174f17996cd1"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:39:22 crc kubenswrapper[5002]: I0930 12:39:22.584885 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cd4d414b-cd09-4feb-913f-174f17996cd1-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "cd4d414b-cd09-4feb-913f-174f17996cd1" (UID: "cd4d414b-cd09-4feb-913f-174f17996cd1"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:39:22 crc kubenswrapper[5002]: I0930 12:39:22.643156 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bcs25\" (UniqueName: \"kubernetes.io/projected/cd4d414b-cd09-4feb-913f-174f17996cd1-kube-api-access-bcs25\") on node \"crc\" DevicePath \"\"" Sep 30 12:39:22 crc kubenswrapper[5002]: I0930 12:39:22.643189 5002 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cd4d414b-cd09-4feb-913f-174f17996cd1-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 12:39:22 crc kubenswrapper[5002]: I0930 12:39:22.643201 5002 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cd4d414b-cd09-4feb-913f-174f17996cd1-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 12:39:22 crc kubenswrapper[5002]: I0930 12:39:22.643211 5002 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cd4d414b-cd09-4feb-913f-174f17996cd1-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 12:39:22 crc kubenswrapper[5002]: I0930 12:39:22.841634 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-845d6d6f59-n66v8" Sep 30 12:39:22 crc kubenswrapper[5002]: I0930 12:39:22.909208 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5784cf869f-cq2vq"] Sep 30 12:39:22 crc kubenswrapper[5002]: I0930 12:39:22.909440 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-5784cf869f-cq2vq" podUID="fa385ab0-fe00-47cb-be3c-0f643bd2a3dc" containerName="dnsmasq-dns" containerID="cri-o://3ae6bbb38d68098ce60c1159e19db2d9a15965fc0ea0cda89f3f0e7806e19466" gracePeriod=10 Sep 30 12:39:23 crc kubenswrapper[5002]: I0930 12:39:23.187404 5002 generic.go:334] "Generic (PLEG): container finished" podID="fa385ab0-fe00-47cb-be3c-0f643bd2a3dc" containerID="3ae6bbb38d68098ce60c1159e19db2d9a15965fc0ea0cda89f3f0e7806e19466" exitCode=0 Sep 30 12:39:23 crc kubenswrapper[5002]: I0930 12:39:23.187529 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5784cf869f-cq2vq" event={"ID":"fa385ab0-fe00-47cb-be3c-0f643bd2a3dc","Type":"ContainerDied","Data":"3ae6bbb38d68098ce60c1159e19db2d9a15965fc0ea0cda89f3f0e7806e19466"} Sep 30 12:39:23 crc kubenswrapper[5002]: I0930 12:39:23.189002 5002 generic.go:334] "Generic (PLEG): container finished" podID="c4e8b9fc-5aff-4584-974f-ad060b75f0c3" containerID="0b2e9bbf92e51a3d7e2593be1b5ed29afadceb9b66b7a114d28b4ba438b71372" exitCode=0 Sep 30 12:39:23 crc kubenswrapper[5002]: I0930 12:39:23.189126 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-6qsgx" event={"ID":"c4e8b9fc-5aff-4584-974f-ad060b75f0c3","Type":"ContainerDied","Data":"0b2e9bbf92e51a3d7e2593be1b5ed29afadceb9b66b7a114d28b4ba438b71372"} Sep 30 12:39:23 crc kubenswrapper[5002]: I0930 12:39:23.191415 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-cell-mapping-8kcwc" Sep 30 12:39:23 crc kubenswrapper[5002]: I0930 12:39:23.191531 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-8kcwc" event={"ID":"cd4d414b-cd09-4feb-913f-174f17996cd1","Type":"ContainerDied","Data":"8ab0b99967b51121d912446d9452b8329fe1d36db8124f5f9b28cf6249eb5ea8"} Sep 30 12:39:23 crc kubenswrapper[5002]: I0930 12:39:23.192184 5002 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8ab0b99967b51121d912446d9452b8329fe1d36db8124f5f9b28cf6249eb5ea8" Sep 30 12:39:23 crc kubenswrapper[5002]: I0930 12:39:23.248429 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Sep 30 12:39:23 crc kubenswrapper[5002]: I0930 12:39:23.380172 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5784cf869f-cq2vq" Sep 30 12:39:23 crc kubenswrapper[5002]: I0930 12:39:23.407744 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Sep 30 12:39:23 crc kubenswrapper[5002]: I0930 12:39:23.408022 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="f6bdc68b-5ea5-468d-bd40-e0c04652a581" containerName="nova-api-log" containerID="cri-o://7b8086bab6889bdc08ab3652a321373821a4c214087e8c8d36de3105d5b111ba" gracePeriod=30 Sep 30 12:39:23 crc kubenswrapper[5002]: I0930 12:39:23.408501 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="f6bdc68b-5ea5-468d-bd40-e0c04652a581" containerName="nova-api-api" containerID="cri-o://34d2fc4ed30de1144e68bb88be6d1aaed0105f0d96ec1bbf05530afd14bc4427" gracePeriod=30 Sep 30 12:39:23 crc kubenswrapper[5002]: I0930 12:39:23.416800 5002 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="f6bdc68b-5ea5-468d-bd40-e0c04652a581" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.0.188:8774/\": EOF" Sep 30 12:39:23 crc kubenswrapper[5002]: I0930 12:39:23.416852 5002 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="f6bdc68b-5ea5-468d-bd40-e0c04652a581" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.0.188:8774/\": EOF" Sep 30 12:39:23 crc kubenswrapper[5002]: I0930 12:39:23.433789 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Sep 30 12:39:23 crc kubenswrapper[5002]: I0930 12:39:23.434043 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="eb4a14e7-0b7a-4157-9a98-678afa5e648f" containerName="nova-metadata-log" containerID="cri-o://803d39cd85d20fea5e2b7e535994c5ef77ab29ecfa07f69a6156e8ab901179cc" gracePeriod=30 Sep 30 12:39:23 crc kubenswrapper[5002]: I0930 12:39:23.434193 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="eb4a14e7-0b7a-4157-9a98-678afa5e648f" containerName="nova-metadata-metadata" containerID="cri-o://481e8ee3670a25b9d235d5a11ddb97f336738396ae5988f10cb9eaa33583035b" gracePeriod=30 Sep 30 12:39:23 crc kubenswrapper[5002]: I0930 12:39:23.460044 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x59j6\" (UniqueName: \"kubernetes.io/projected/fa385ab0-fe00-47cb-be3c-0f643bd2a3dc-kube-api-access-x59j6\") pod \"fa385ab0-fe00-47cb-be3c-0f643bd2a3dc\" 
(UID: \"fa385ab0-fe00-47cb-be3c-0f643bd2a3dc\") " Sep 30 12:39:23 crc kubenswrapper[5002]: I0930 12:39:23.460114 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/fa385ab0-fe00-47cb-be3c-0f643bd2a3dc-ovsdbserver-nb\") pod \"fa385ab0-fe00-47cb-be3c-0f643bd2a3dc\" (UID: \"fa385ab0-fe00-47cb-be3c-0f643bd2a3dc\") " Sep 30 12:39:23 crc kubenswrapper[5002]: I0930 12:39:23.460157 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/fa385ab0-fe00-47cb-be3c-0f643bd2a3dc-ovsdbserver-sb\") pod \"fa385ab0-fe00-47cb-be3c-0f643bd2a3dc\" (UID: \"fa385ab0-fe00-47cb-be3c-0f643bd2a3dc\") " Sep 30 12:39:23 crc kubenswrapper[5002]: I0930 12:39:23.460187 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/fa385ab0-fe00-47cb-be3c-0f643bd2a3dc-dns-svc\") pod \"fa385ab0-fe00-47cb-be3c-0f643bd2a3dc\" (UID: \"fa385ab0-fe00-47cb-be3c-0f643bd2a3dc\") " Sep 30 12:39:23 crc kubenswrapper[5002]: I0930 12:39:23.460260 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fa385ab0-fe00-47cb-be3c-0f643bd2a3dc-config\") pod \"fa385ab0-fe00-47cb-be3c-0f643bd2a3dc\" (UID: \"fa385ab0-fe00-47cb-be3c-0f643bd2a3dc\") " Sep 30 12:39:23 crc kubenswrapper[5002]: I0930 12:39:23.460326 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/fa385ab0-fe00-47cb-be3c-0f643bd2a3dc-dns-swift-storage-0\") pod \"fa385ab0-fe00-47cb-be3c-0f643bd2a3dc\" (UID: \"fa385ab0-fe00-47cb-be3c-0f643bd2a3dc\") " Sep 30 12:39:23 crc kubenswrapper[5002]: I0930 12:39:23.470997 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fa385ab0-fe00-47cb-be3c-0f643bd2a3dc-kube-api-access-x59j6" (OuterVolumeSpecName: "kube-api-access-x59j6") pod "fa385ab0-fe00-47cb-be3c-0f643bd2a3dc" (UID: "fa385ab0-fe00-47cb-be3c-0f643bd2a3dc"). InnerVolumeSpecName "kube-api-access-x59j6". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 12:39:23 crc kubenswrapper[5002]: I0930 12:39:23.521387 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fa385ab0-fe00-47cb-be3c-0f643bd2a3dc-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "fa385ab0-fe00-47cb-be3c-0f643bd2a3dc" (UID: "fa385ab0-fe00-47cb-be3c-0f643bd2a3dc"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 12:39:23 crc kubenswrapper[5002]: I0930 12:39:23.534751 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fa385ab0-fe00-47cb-be3c-0f643bd2a3dc-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "fa385ab0-fe00-47cb-be3c-0f643bd2a3dc" (UID: "fa385ab0-fe00-47cb-be3c-0f643bd2a3dc"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 12:39:23 crc kubenswrapper[5002]: I0930 12:39:23.534985 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fa385ab0-fe00-47cb-be3c-0f643bd2a3dc-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "fa385ab0-fe00-47cb-be3c-0f643bd2a3dc" (UID: "fa385ab0-fe00-47cb-be3c-0f643bd2a3dc"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 12:39:23 crc kubenswrapper[5002]: I0930 12:39:23.538254 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fa385ab0-fe00-47cb-be3c-0f643bd2a3dc-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "fa385ab0-fe00-47cb-be3c-0f643bd2a3dc" (UID: "fa385ab0-fe00-47cb-be3c-0f643bd2a3dc"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 12:39:23 crc kubenswrapper[5002]: I0930 12:39:23.548128 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fa385ab0-fe00-47cb-be3c-0f643bd2a3dc-config" (OuterVolumeSpecName: "config") pod "fa385ab0-fe00-47cb-be3c-0f643bd2a3dc" (UID: "fa385ab0-fe00-47cb-be3c-0f643bd2a3dc"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 12:39:23 crc kubenswrapper[5002]: I0930 12:39:23.562730 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x59j6\" (UniqueName: \"kubernetes.io/projected/fa385ab0-fe00-47cb-be3c-0f643bd2a3dc-kube-api-access-x59j6\") on node \"crc\" DevicePath \"\"" Sep 30 12:39:23 crc kubenswrapper[5002]: I0930 12:39:23.562769 5002 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/fa385ab0-fe00-47cb-be3c-0f643bd2a3dc-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Sep 30 12:39:23 crc kubenswrapper[5002]: I0930 12:39:23.562778 5002 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/fa385ab0-fe00-47cb-be3c-0f643bd2a3dc-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Sep 30 12:39:23 crc kubenswrapper[5002]: I0930 12:39:23.562787 5002 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/fa385ab0-fe00-47cb-be3c-0f643bd2a3dc-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 30 12:39:23 crc kubenswrapper[5002]: I0930 12:39:23.562796 5002 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fa385ab0-fe00-47cb-be3c-0f643bd2a3dc-config\") on node \"crc\" DevicePath \"\"" Sep 30 12:39:23 crc kubenswrapper[5002]: I0930 12:39:23.562804 5002 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/fa385ab0-fe00-47cb-be3c-0f643bd2a3dc-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Sep 30 12:39:23 crc kubenswrapper[5002]: I0930 12:39:23.616418 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Sep 30 12:39:23 crc kubenswrapper[5002]: I0930 12:39:23.616458 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Sep 30 12:39:23 crc kubenswrapper[5002]: I0930 12:39:23.981335 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Sep 30 12:39:23 crc kubenswrapper[5002]: I0930 12:39:23.991111 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Sep 30 12:39:24 crc kubenswrapper[5002]: I0930 12:39:24.070593 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bv2x9\" (UniqueName: \"kubernetes.io/projected/eb4a14e7-0b7a-4157-9a98-678afa5e648f-kube-api-access-bv2x9\") pod \"eb4a14e7-0b7a-4157-9a98-678afa5e648f\" (UID: \"eb4a14e7-0b7a-4157-9a98-678afa5e648f\") " Sep 30 12:39:24 crc kubenswrapper[5002]: I0930 12:39:24.070655 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/eb4a14e7-0b7a-4157-9a98-678afa5e648f-nova-metadata-tls-certs\") pod \"eb4a14e7-0b7a-4157-9a98-678afa5e648f\" (UID: \"eb4a14e7-0b7a-4157-9a98-678afa5e648f\") " Sep 30 12:39:24 crc kubenswrapper[5002]: I0930 12:39:24.070690 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eb4a14e7-0b7a-4157-9a98-678afa5e648f-config-data\") pod \"eb4a14e7-0b7a-4157-9a98-678afa5e648f\" (UID: \"eb4a14e7-0b7a-4157-9a98-678afa5e648f\") " Sep 30 12:39:24 crc kubenswrapper[5002]: I0930 12:39:24.070833 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eb4a14e7-0b7a-4157-9a98-678afa5e648f-combined-ca-bundle\") pod \"eb4a14e7-0b7a-4157-9a98-678afa5e648f\" (UID: \"eb4a14e7-0b7a-4157-9a98-678afa5e648f\") " Sep 30 12:39:24 crc kubenswrapper[5002]: I0930 12:39:24.070868 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/eb4a14e7-0b7a-4157-9a98-678afa5e648f-logs\") pod \"eb4a14e7-0b7a-4157-9a98-678afa5e648f\" (UID: \"eb4a14e7-0b7a-4157-9a98-678afa5e648f\") " Sep 30 12:39:24 crc kubenswrapper[5002]: I0930 12:39:24.071317 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/eb4a14e7-0b7a-4157-9a98-678afa5e648f-logs" (OuterVolumeSpecName: "logs") pod "eb4a14e7-0b7a-4157-9a98-678afa5e648f" (UID: "eb4a14e7-0b7a-4157-9a98-678afa5e648f"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 12:39:24 crc kubenswrapper[5002]: I0930 12:39:24.079555 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/eb4a14e7-0b7a-4157-9a98-678afa5e648f-kube-api-access-bv2x9" (OuterVolumeSpecName: "kube-api-access-bv2x9") pod "eb4a14e7-0b7a-4157-9a98-678afa5e648f" (UID: "eb4a14e7-0b7a-4157-9a98-678afa5e648f"). InnerVolumeSpecName "kube-api-access-bv2x9". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 12:39:24 crc kubenswrapper[5002]: I0930 12:39:24.096781 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/eb4a14e7-0b7a-4157-9a98-678afa5e648f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "eb4a14e7-0b7a-4157-9a98-678afa5e648f" (UID: "eb4a14e7-0b7a-4157-9a98-678afa5e648f"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:39:24 crc kubenswrapper[5002]: I0930 12:39:24.110554 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/eb4a14e7-0b7a-4157-9a98-678afa5e648f-config-data" (OuterVolumeSpecName: "config-data") pod "eb4a14e7-0b7a-4157-9a98-678afa5e648f" (UID: "eb4a14e7-0b7a-4157-9a98-678afa5e648f"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:39:24 crc kubenswrapper[5002]: I0930 12:39:24.126458 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/eb4a14e7-0b7a-4157-9a98-678afa5e648f-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "eb4a14e7-0b7a-4157-9a98-678afa5e648f" (UID: "eb4a14e7-0b7a-4157-9a98-678afa5e648f"). InnerVolumeSpecName "nova-metadata-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:39:24 crc kubenswrapper[5002]: I0930 12:39:24.172911 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bv2x9\" (UniqueName: \"kubernetes.io/projected/eb4a14e7-0b7a-4157-9a98-678afa5e648f-kube-api-access-bv2x9\") on node \"crc\" DevicePath \"\"" Sep 30 12:39:24 crc kubenswrapper[5002]: I0930 12:39:24.172945 5002 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/eb4a14e7-0b7a-4157-9a98-678afa5e648f-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 30 12:39:24 crc kubenswrapper[5002]: I0930 12:39:24.172959 5002 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eb4a14e7-0b7a-4157-9a98-678afa5e648f-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 12:39:24 crc kubenswrapper[5002]: I0930 12:39:24.172972 5002 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eb4a14e7-0b7a-4157-9a98-678afa5e648f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 12:39:24 crc kubenswrapper[5002]: I0930 12:39:24.172983 5002 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/eb4a14e7-0b7a-4157-9a98-678afa5e648f-logs\") on node \"crc\" DevicePath \"\"" Sep 30 12:39:24 crc kubenswrapper[5002]: I0930 12:39:24.201763 5002 generic.go:334] "Generic (PLEG): container finished" podID="f6bdc68b-5ea5-468d-bd40-e0c04652a581" containerID="7b8086bab6889bdc08ab3652a321373821a4c214087e8c8d36de3105d5b111ba" exitCode=143 Sep 30 12:39:24 crc kubenswrapper[5002]: I0930 12:39:24.201817 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"f6bdc68b-5ea5-468d-bd40-e0c04652a581","Type":"ContainerDied","Data":"7b8086bab6889bdc08ab3652a321373821a4c214087e8c8d36de3105d5b111ba"} Sep 30 12:39:24 crc kubenswrapper[5002]: I0930 12:39:24.203843 5002 generic.go:334] "Generic (PLEG): container finished" podID="eb4a14e7-0b7a-4157-9a98-678afa5e648f" containerID="481e8ee3670a25b9d235d5a11ddb97f336738396ae5988f10cb9eaa33583035b" exitCode=0 Sep 30 12:39:24 crc kubenswrapper[5002]: I0930 12:39:24.203966 5002 generic.go:334] "Generic (PLEG): container finished" podID="eb4a14e7-0b7a-4157-9a98-678afa5e648f" containerID="803d39cd85d20fea5e2b7e535994c5ef77ab29ecfa07f69a6156e8ab901179cc" exitCode=143 Sep 30 12:39:24 crc kubenswrapper[5002]: I0930 12:39:24.204187 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"eb4a14e7-0b7a-4157-9a98-678afa5e648f","Type":"ContainerDied","Data":"481e8ee3670a25b9d235d5a11ddb97f336738396ae5988f10cb9eaa33583035b"} Sep 30 12:39:24 crc kubenswrapper[5002]: I0930 12:39:24.204306 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"eb4a14e7-0b7a-4157-9a98-678afa5e648f","Type":"ContainerDied","Data":"803d39cd85d20fea5e2b7e535994c5ef77ab29ecfa07f69a6156e8ab901179cc"} Sep 30 12:39:24 crc 
kubenswrapper[5002]: I0930 12:39:24.204394 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"eb4a14e7-0b7a-4157-9a98-678afa5e648f","Type":"ContainerDied","Data":"fffbeb2b0d3299f024d32c876e3f45d57a819a4f98a1426342101309497a61df"} Sep 30 12:39:24 crc kubenswrapper[5002]: I0930 12:39:24.204506 5002 scope.go:117] "RemoveContainer" containerID="481e8ee3670a25b9d235d5a11ddb97f336738396ae5988f10cb9eaa33583035b" Sep 30 12:39:24 crc kubenswrapper[5002]: I0930 12:39:24.204748 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Sep 30 12:39:24 crc kubenswrapper[5002]: I0930 12:39:24.209035 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5784cf869f-cq2vq" Sep 30 12:39:24 crc kubenswrapper[5002]: I0930 12:39:24.217744 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5784cf869f-cq2vq" event={"ID":"fa385ab0-fe00-47cb-be3c-0f643bd2a3dc","Type":"ContainerDied","Data":"cd867b89bdb5d3a3ce9100106a5f48b103ebb9c853b56d5c2ec9d6b0a42d562c"} Sep 30 12:39:24 crc kubenswrapper[5002]: I0930 12:39:24.234208 5002 scope.go:117] "RemoveContainer" containerID="803d39cd85d20fea5e2b7e535994c5ef77ab29ecfa07f69a6156e8ab901179cc" Sep 30 12:39:24 crc kubenswrapper[5002]: I0930 12:39:24.250518 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5784cf869f-cq2vq"] Sep 30 12:39:24 crc kubenswrapper[5002]: I0930 12:39:24.258592 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5784cf869f-cq2vq"] Sep 30 12:39:24 crc kubenswrapper[5002]: I0930 12:39:24.266953 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Sep 30 12:39:24 crc kubenswrapper[5002]: I0930 12:39:24.271874 5002 scope.go:117] "RemoveContainer" containerID="481e8ee3670a25b9d235d5a11ddb97f336738396ae5988f10cb9eaa33583035b" Sep 30 12:39:24 crc kubenswrapper[5002]: E0930 12:39:24.272416 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"481e8ee3670a25b9d235d5a11ddb97f336738396ae5988f10cb9eaa33583035b\": container with ID starting with 481e8ee3670a25b9d235d5a11ddb97f336738396ae5988f10cb9eaa33583035b not found: ID does not exist" containerID="481e8ee3670a25b9d235d5a11ddb97f336738396ae5988f10cb9eaa33583035b" Sep 30 12:39:24 crc kubenswrapper[5002]: I0930 12:39:24.272455 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"481e8ee3670a25b9d235d5a11ddb97f336738396ae5988f10cb9eaa33583035b"} err="failed to get container status \"481e8ee3670a25b9d235d5a11ddb97f336738396ae5988f10cb9eaa33583035b\": rpc error: code = NotFound desc = could not find container \"481e8ee3670a25b9d235d5a11ddb97f336738396ae5988f10cb9eaa33583035b\": container with ID starting with 481e8ee3670a25b9d235d5a11ddb97f336738396ae5988f10cb9eaa33583035b not found: ID does not exist" Sep 30 12:39:24 crc kubenswrapper[5002]: I0930 12:39:24.272496 5002 scope.go:117] "RemoveContainer" containerID="803d39cd85d20fea5e2b7e535994c5ef77ab29ecfa07f69a6156e8ab901179cc" Sep 30 12:39:24 crc kubenswrapper[5002]: E0930 12:39:24.272822 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"803d39cd85d20fea5e2b7e535994c5ef77ab29ecfa07f69a6156e8ab901179cc\": container with ID starting with 
803d39cd85d20fea5e2b7e535994c5ef77ab29ecfa07f69a6156e8ab901179cc not found: ID does not exist" containerID="803d39cd85d20fea5e2b7e535994c5ef77ab29ecfa07f69a6156e8ab901179cc" Sep 30 12:39:24 crc kubenswrapper[5002]: I0930 12:39:24.272859 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"803d39cd85d20fea5e2b7e535994c5ef77ab29ecfa07f69a6156e8ab901179cc"} err="failed to get container status \"803d39cd85d20fea5e2b7e535994c5ef77ab29ecfa07f69a6156e8ab901179cc\": rpc error: code = NotFound desc = could not find container \"803d39cd85d20fea5e2b7e535994c5ef77ab29ecfa07f69a6156e8ab901179cc\": container with ID starting with 803d39cd85d20fea5e2b7e535994c5ef77ab29ecfa07f69a6156e8ab901179cc not found: ID does not exist" Sep 30 12:39:24 crc kubenswrapper[5002]: I0930 12:39:24.272884 5002 scope.go:117] "RemoveContainer" containerID="481e8ee3670a25b9d235d5a11ddb97f336738396ae5988f10cb9eaa33583035b" Sep 30 12:39:24 crc kubenswrapper[5002]: I0930 12:39:24.273196 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"481e8ee3670a25b9d235d5a11ddb97f336738396ae5988f10cb9eaa33583035b"} err="failed to get container status \"481e8ee3670a25b9d235d5a11ddb97f336738396ae5988f10cb9eaa33583035b\": rpc error: code = NotFound desc = could not find container \"481e8ee3670a25b9d235d5a11ddb97f336738396ae5988f10cb9eaa33583035b\": container with ID starting with 481e8ee3670a25b9d235d5a11ddb97f336738396ae5988f10cb9eaa33583035b not found: ID does not exist" Sep 30 12:39:24 crc kubenswrapper[5002]: I0930 12:39:24.273225 5002 scope.go:117] "RemoveContainer" containerID="803d39cd85d20fea5e2b7e535994c5ef77ab29ecfa07f69a6156e8ab901179cc" Sep 30 12:39:24 crc kubenswrapper[5002]: I0930 12:39:24.273458 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"803d39cd85d20fea5e2b7e535994c5ef77ab29ecfa07f69a6156e8ab901179cc"} err="failed to get container status \"803d39cd85d20fea5e2b7e535994c5ef77ab29ecfa07f69a6156e8ab901179cc\": rpc error: code = NotFound desc = could not find container \"803d39cd85d20fea5e2b7e535994c5ef77ab29ecfa07f69a6156e8ab901179cc\": container with ID starting with 803d39cd85d20fea5e2b7e535994c5ef77ab29ecfa07f69a6156e8ab901179cc not found: ID does not exist" Sep 30 12:39:24 crc kubenswrapper[5002]: I0930 12:39:24.273498 5002 scope.go:117] "RemoveContainer" containerID="3ae6bbb38d68098ce60c1159e19db2d9a15965fc0ea0cda89f3f0e7806e19466" Sep 30 12:39:24 crc kubenswrapper[5002]: I0930 12:39:24.278355 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Sep 30 12:39:24 crc kubenswrapper[5002]: I0930 12:39:24.291931 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Sep 30 12:39:24 crc kubenswrapper[5002]: E0930 12:39:24.292395 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fa385ab0-fe00-47cb-be3c-0f643bd2a3dc" containerName="init" Sep 30 12:39:24 crc kubenswrapper[5002]: I0930 12:39:24.292413 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="fa385ab0-fe00-47cb-be3c-0f643bd2a3dc" containerName="init" Sep 30 12:39:24 crc kubenswrapper[5002]: E0930 12:39:24.292427 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fa385ab0-fe00-47cb-be3c-0f643bd2a3dc" containerName="dnsmasq-dns" Sep 30 12:39:24 crc kubenswrapper[5002]: I0930 12:39:24.292433 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="fa385ab0-fe00-47cb-be3c-0f643bd2a3dc" containerName="dnsmasq-dns" 
Sep 30 12:39:24 crc kubenswrapper[5002]: E0930 12:39:24.292455 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eb4a14e7-0b7a-4157-9a98-678afa5e648f" containerName="nova-metadata-log" Sep 30 12:39:24 crc kubenswrapper[5002]: I0930 12:39:24.292464 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="eb4a14e7-0b7a-4157-9a98-678afa5e648f" containerName="nova-metadata-log" Sep 30 12:39:24 crc kubenswrapper[5002]: E0930 12:39:24.292495 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eb4a14e7-0b7a-4157-9a98-678afa5e648f" containerName="nova-metadata-metadata" Sep 30 12:39:24 crc kubenswrapper[5002]: I0930 12:39:24.292502 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="eb4a14e7-0b7a-4157-9a98-678afa5e648f" containerName="nova-metadata-metadata" Sep 30 12:39:24 crc kubenswrapper[5002]: E0930 12:39:24.292514 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cd4d414b-cd09-4feb-913f-174f17996cd1" containerName="nova-manage" Sep 30 12:39:24 crc kubenswrapper[5002]: I0930 12:39:24.292520 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="cd4d414b-cd09-4feb-913f-174f17996cd1" containerName="nova-manage" Sep 30 12:39:24 crc kubenswrapper[5002]: I0930 12:39:24.292706 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="cd4d414b-cd09-4feb-913f-174f17996cd1" containerName="nova-manage" Sep 30 12:39:24 crc kubenswrapper[5002]: I0930 12:39:24.292726 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="fa385ab0-fe00-47cb-be3c-0f643bd2a3dc" containerName="dnsmasq-dns" Sep 30 12:39:24 crc kubenswrapper[5002]: I0930 12:39:24.292736 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="eb4a14e7-0b7a-4157-9a98-678afa5e648f" containerName="nova-metadata-metadata" Sep 30 12:39:24 crc kubenswrapper[5002]: I0930 12:39:24.292751 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="eb4a14e7-0b7a-4157-9a98-678afa5e648f" containerName="nova-metadata-log" Sep 30 12:39:24 crc kubenswrapper[5002]: I0930 12:39:24.293867 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Sep 30 12:39:24 crc kubenswrapper[5002]: I0930 12:39:24.296977 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Sep 30 12:39:24 crc kubenswrapper[5002]: I0930 12:39:24.297207 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Sep 30 12:39:24 crc kubenswrapper[5002]: I0930 12:39:24.299038 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Sep 30 12:39:24 crc kubenswrapper[5002]: I0930 12:39:24.330056 5002 scope.go:117] "RemoveContainer" containerID="1dbc3dc2eec7aad68d60b3e1467b5c442a47d143f4db5f09972403738b2479ac" Sep 30 12:39:24 crc kubenswrapper[5002]: I0930 12:39:24.379707 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b23ec22e-90c8-41b7-bdc8-4861c7ec9c83-logs\") pod \"nova-metadata-0\" (UID: \"b23ec22e-90c8-41b7-bdc8-4861c7ec9c83\") " pod="openstack/nova-metadata-0" Sep 30 12:39:24 crc kubenswrapper[5002]: I0930 12:39:24.379788 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/b23ec22e-90c8-41b7-bdc8-4861c7ec9c83-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"b23ec22e-90c8-41b7-bdc8-4861c7ec9c83\") " pod="openstack/nova-metadata-0" Sep 30 12:39:24 crc kubenswrapper[5002]: I0930 12:39:24.379850 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b23ec22e-90c8-41b7-bdc8-4861c7ec9c83-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"b23ec22e-90c8-41b7-bdc8-4861c7ec9c83\") " pod="openstack/nova-metadata-0" Sep 30 12:39:24 crc kubenswrapper[5002]: I0930 12:39:24.379882 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rtk9x\" (UniqueName: \"kubernetes.io/projected/b23ec22e-90c8-41b7-bdc8-4861c7ec9c83-kube-api-access-rtk9x\") pod \"nova-metadata-0\" (UID: \"b23ec22e-90c8-41b7-bdc8-4861c7ec9c83\") " pod="openstack/nova-metadata-0" Sep 30 12:39:24 crc kubenswrapper[5002]: I0930 12:39:24.379911 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b23ec22e-90c8-41b7-bdc8-4861c7ec9c83-config-data\") pod \"nova-metadata-0\" (UID: \"b23ec22e-90c8-41b7-bdc8-4861c7ec9c83\") " pod="openstack/nova-metadata-0" Sep 30 12:39:24 crc kubenswrapper[5002]: I0930 12:39:24.481505 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b23ec22e-90c8-41b7-bdc8-4861c7ec9c83-logs\") pod \"nova-metadata-0\" (UID: \"b23ec22e-90c8-41b7-bdc8-4861c7ec9c83\") " pod="openstack/nova-metadata-0" Sep 30 12:39:24 crc kubenswrapper[5002]: I0930 12:39:24.481883 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/b23ec22e-90c8-41b7-bdc8-4861c7ec9c83-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"b23ec22e-90c8-41b7-bdc8-4861c7ec9c83\") " pod="openstack/nova-metadata-0" Sep 30 12:39:24 crc kubenswrapper[5002]: I0930 12:39:24.481958 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/b23ec22e-90c8-41b7-bdc8-4861c7ec9c83-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"b23ec22e-90c8-41b7-bdc8-4861c7ec9c83\") " pod="openstack/nova-metadata-0" Sep 30 12:39:24 crc kubenswrapper[5002]: I0930 12:39:24.481995 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rtk9x\" (UniqueName: \"kubernetes.io/projected/b23ec22e-90c8-41b7-bdc8-4861c7ec9c83-kube-api-access-rtk9x\") pod \"nova-metadata-0\" (UID: \"b23ec22e-90c8-41b7-bdc8-4861c7ec9c83\") " pod="openstack/nova-metadata-0" Sep 30 12:39:24 crc kubenswrapper[5002]: I0930 12:39:24.482044 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b23ec22e-90c8-41b7-bdc8-4861c7ec9c83-config-data\") pod \"nova-metadata-0\" (UID: \"b23ec22e-90c8-41b7-bdc8-4861c7ec9c83\") " pod="openstack/nova-metadata-0" Sep 30 12:39:24 crc kubenswrapper[5002]: I0930 12:39:24.482504 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b23ec22e-90c8-41b7-bdc8-4861c7ec9c83-logs\") pod \"nova-metadata-0\" (UID: \"b23ec22e-90c8-41b7-bdc8-4861c7ec9c83\") " pod="openstack/nova-metadata-0" Sep 30 12:39:24 crc kubenswrapper[5002]: I0930 12:39:24.487647 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b23ec22e-90c8-41b7-bdc8-4861c7ec9c83-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"b23ec22e-90c8-41b7-bdc8-4861c7ec9c83\") " pod="openstack/nova-metadata-0" Sep 30 12:39:24 crc kubenswrapper[5002]: I0930 12:39:24.491847 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/b23ec22e-90c8-41b7-bdc8-4861c7ec9c83-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"b23ec22e-90c8-41b7-bdc8-4861c7ec9c83\") " pod="openstack/nova-metadata-0" Sep 30 12:39:24 crc kubenswrapper[5002]: I0930 12:39:24.501115 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b23ec22e-90c8-41b7-bdc8-4861c7ec9c83-config-data\") pod \"nova-metadata-0\" (UID: \"b23ec22e-90c8-41b7-bdc8-4861c7ec9c83\") " pod="openstack/nova-metadata-0" Sep 30 12:39:24 crc kubenswrapper[5002]: I0930 12:39:24.506547 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rtk9x\" (UniqueName: \"kubernetes.io/projected/b23ec22e-90c8-41b7-bdc8-4861c7ec9c83-kube-api-access-rtk9x\") pod \"nova-metadata-0\" (UID: \"b23ec22e-90c8-41b7-bdc8-4861c7ec9c83\") " pod="openstack/nova-metadata-0" Sep 30 12:39:24 crc kubenswrapper[5002]: I0930 12:39:24.629421 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Sep 30 12:39:24 crc kubenswrapper[5002]: I0930 12:39:24.634350 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-6qsgx" Sep 30 12:39:24 crc kubenswrapper[5002]: I0930 12:39:24.685371 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c4e8b9fc-5aff-4584-974f-ad060b75f0c3-combined-ca-bundle\") pod \"c4e8b9fc-5aff-4584-974f-ad060b75f0c3\" (UID: \"c4e8b9fc-5aff-4584-974f-ad060b75f0c3\") " Sep 30 12:39:24 crc kubenswrapper[5002]: I0930 12:39:24.685514 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tpzgr\" (UniqueName: \"kubernetes.io/projected/c4e8b9fc-5aff-4584-974f-ad060b75f0c3-kube-api-access-tpzgr\") pod \"c4e8b9fc-5aff-4584-974f-ad060b75f0c3\" (UID: \"c4e8b9fc-5aff-4584-974f-ad060b75f0c3\") " Sep 30 12:39:24 crc kubenswrapper[5002]: I0930 12:39:24.685667 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c4e8b9fc-5aff-4584-974f-ad060b75f0c3-config-data\") pod \"c4e8b9fc-5aff-4584-974f-ad060b75f0c3\" (UID: \"c4e8b9fc-5aff-4584-974f-ad060b75f0c3\") " Sep 30 12:39:24 crc kubenswrapper[5002]: I0930 12:39:24.685732 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c4e8b9fc-5aff-4584-974f-ad060b75f0c3-scripts\") pod \"c4e8b9fc-5aff-4584-974f-ad060b75f0c3\" (UID: \"c4e8b9fc-5aff-4584-974f-ad060b75f0c3\") " Sep 30 12:39:24 crc kubenswrapper[5002]: I0930 12:39:24.700571 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c4e8b9fc-5aff-4584-974f-ad060b75f0c3-scripts" (OuterVolumeSpecName: "scripts") pod "c4e8b9fc-5aff-4584-974f-ad060b75f0c3" (UID: "c4e8b9fc-5aff-4584-974f-ad060b75f0c3"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:39:24 crc kubenswrapper[5002]: I0930 12:39:24.703331 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c4e8b9fc-5aff-4584-974f-ad060b75f0c3-kube-api-access-tpzgr" (OuterVolumeSpecName: "kube-api-access-tpzgr") pod "c4e8b9fc-5aff-4584-974f-ad060b75f0c3" (UID: "c4e8b9fc-5aff-4584-974f-ad060b75f0c3"). InnerVolumeSpecName "kube-api-access-tpzgr". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 12:39:24 crc kubenswrapper[5002]: I0930 12:39:24.707494 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="eb4a14e7-0b7a-4157-9a98-678afa5e648f" path="/var/lib/kubelet/pods/eb4a14e7-0b7a-4157-9a98-678afa5e648f/volumes" Sep 30 12:39:24 crc kubenswrapper[5002]: I0930 12:39:24.708122 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fa385ab0-fe00-47cb-be3c-0f643bd2a3dc" path="/var/lib/kubelet/pods/fa385ab0-fe00-47cb-be3c-0f643bd2a3dc/volumes" Sep 30 12:39:24 crc kubenswrapper[5002]: I0930 12:39:24.721299 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c4e8b9fc-5aff-4584-974f-ad060b75f0c3-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c4e8b9fc-5aff-4584-974f-ad060b75f0c3" (UID: "c4e8b9fc-5aff-4584-974f-ad060b75f0c3"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:39:24 crc kubenswrapper[5002]: I0930 12:39:24.731705 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c4e8b9fc-5aff-4584-974f-ad060b75f0c3-config-data" (OuterVolumeSpecName: "config-data") pod "c4e8b9fc-5aff-4584-974f-ad060b75f0c3" (UID: "c4e8b9fc-5aff-4584-974f-ad060b75f0c3"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:39:24 crc kubenswrapper[5002]: I0930 12:39:24.787950 5002 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c4e8b9fc-5aff-4584-974f-ad060b75f0c3-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 12:39:24 crc kubenswrapper[5002]: I0930 12:39:24.788345 5002 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c4e8b9fc-5aff-4584-974f-ad060b75f0c3-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 12:39:24 crc kubenswrapper[5002]: I0930 12:39:24.788359 5002 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c4e8b9fc-5aff-4584-974f-ad060b75f0c3-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 12:39:24 crc kubenswrapper[5002]: I0930 12:39:24.788377 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tpzgr\" (UniqueName: \"kubernetes.io/projected/c4e8b9fc-5aff-4584-974f-ad060b75f0c3-kube-api-access-tpzgr\") on node \"crc\" DevicePath \"\"" Sep 30 12:39:25 crc kubenswrapper[5002]: I0930 12:39:25.106407 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Sep 30 12:39:25 crc kubenswrapper[5002]: I0930 12:39:25.230396 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-6qsgx" event={"ID":"c4e8b9fc-5aff-4584-974f-ad060b75f0c3","Type":"ContainerDied","Data":"4473eed15ed841d4fbb3d9de3dd9cd5789c6228945323b5545cbea6299e5a04c"} Sep 30 12:39:25 crc kubenswrapper[5002]: I0930 12:39:25.230765 5002 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4473eed15ed841d4fbb3d9de3dd9cd5789c6228945323b5545cbea6299e5a04c" Sep 30 12:39:25 crc kubenswrapper[5002]: I0930 12:39:25.230876 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-6qsgx" Sep 30 12:39:25 crc kubenswrapper[5002]: I0930 12:39:25.232165 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="d9c49bf4-915c-4efb-81ed-b8c7a393d371" containerName="nova-scheduler-scheduler" containerID="cri-o://5590adf8564ec87f88f4ab86615dc85efc1aefc270ae86356444dff4296da921" gracePeriod=30 Sep 30 12:39:25 crc kubenswrapper[5002]: I0930 12:39:25.232298 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"b23ec22e-90c8-41b7-bdc8-4861c7ec9c83","Type":"ContainerStarted","Data":"c8a4e29eeaa937bc31908adaa6785f14434ecc16cc601d003ef174de8fe6b7a4"} Sep 30 12:39:25 crc kubenswrapper[5002]: I0930 12:39:25.289351 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-0"] Sep 30 12:39:25 crc kubenswrapper[5002]: E0930 12:39:25.289809 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c4e8b9fc-5aff-4584-974f-ad060b75f0c3" containerName="nova-cell1-conductor-db-sync" Sep 30 12:39:25 crc kubenswrapper[5002]: I0930 12:39:25.289830 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="c4e8b9fc-5aff-4584-974f-ad060b75f0c3" containerName="nova-cell1-conductor-db-sync" Sep 30 12:39:25 crc kubenswrapper[5002]: I0930 12:39:25.290073 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="c4e8b9fc-5aff-4584-974f-ad060b75f0c3" containerName="nova-cell1-conductor-db-sync" Sep 30 12:39:25 crc kubenswrapper[5002]: I0930 12:39:25.290674 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0" Sep 30 12:39:25 crc kubenswrapper[5002]: I0930 12:39:25.292874 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Sep 30 12:39:25 crc kubenswrapper[5002]: I0930 12:39:25.299751 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Sep 30 12:39:25 crc kubenswrapper[5002]: I0930 12:39:25.404096 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tlwr5\" (UniqueName: \"kubernetes.io/projected/b6734a59-b52d-4116-bd32-31431b949757-kube-api-access-tlwr5\") pod \"nova-cell1-conductor-0\" (UID: \"b6734a59-b52d-4116-bd32-31431b949757\") " pod="openstack/nova-cell1-conductor-0" Sep 30 12:39:25 crc kubenswrapper[5002]: I0930 12:39:25.404152 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b6734a59-b52d-4116-bd32-31431b949757-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"b6734a59-b52d-4116-bd32-31431b949757\") " pod="openstack/nova-cell1-conductor-0" Sep 30 12:39:25 crc kubenswrapper[5002]: I0930 12:39:25.404258 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b6734a59-b52d-4116-bd32-31431b949757-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"b6734a59-b52d-4116-bd32-31431b949757\") " pod="openstack/nova-cell1-conductor-0" Sep 30 12:39:25 crc kubenswrapper[5002]: I0930 12:39:25.505796 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tlwr5\" (UniqueName: \"kubernetes.io/projected/b6734a59-b52d-4116-bd32-31431b949757-kube-api-access-tlwr5\") pod \"nova-cell1-conductor-0\" (UID: 
\"b6734a59-b52d-4116-bd32-31431b949757\") " pod="openstack/nova-cell1-conductor-0" Sep 30 12:39:25 crc kubenswrapper[5002]: I0930 12:39:25.505842 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b6734a59-b52d-4116-bd32-31431b949757-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"b6734a59-b52d-4116-bd32-31431b949757\") " pod="openstack/nova-cell1-conductor-0" Sep 30 12:39:25 crc kubenswrapper[5002]: I0930 12:39:25.505918 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b6734a59-b52d-4116-bd32-31431b949757-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"b6734a59-b52d-4116-bd32-31431b949757\") " pod="openstack/nova-cell1-conductor-0" Sep 30 12:39:25 crc kubenswrapper[5002]: I0930 12:39:25.509596 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b6734a59-b52d-4116-bd32-31431b949757-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"b6734a59-b52d-4116-bd32-31431b949757\") " pod="openstack/nova-cell1-conductor-0" Sep 30 12:39:25 crc kubenswrapper[5002]: I0930 12:39:25.511073 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b6734a59-b52d-4116-bd32-31431b949757-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"b6734a59-b52d-4116-bd32-31431b949757\") " pod="openstack/nova-cell1-conductor-0" Sep 30 12:39:25 crc kubenswrapper[5002]: I0930 12:39:25.528371 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tlwr5\" (UniqueName: \"kubernetes.io/projected/b6734a59-b52d-4116-bd32-31431b949757-kube-api-access-tlwr5\") pod \"nova-cell1-conductor-0\" (UID: \"b6734a59-b52d-4116-bd32-31431b949757\") " pod="openstack/nova-cell1-conductor-0" Sep 30 12:39:25 crc kubenswrapper[5002]: I0930 12:39:25.624164 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-0" Sep 30 12:39:26 crc kubenswrapper[5002]: I0930 12:39:26.085776 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Sep 30 12:39:26 crc kubenswrapper[5002]: W0930 12:39:26.095171 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb6734a59_b52d_4116_bd32_31431b949757.slice/crio-e3ef3d3927e1004d342e4980369756f1d335bf02c6635986722a090b90ac997d WatchSource:0}: Error finding container e3ef3d3927e1004d342e4980369756f1d335bf02c6635986722a090b90ac997d: Status 404 returned error can't find the container with id e3ef3d3927e1004d342e4980369756f1d335bf02c6635986722a090b90ac997d Sep 30 12:39:26 crc kubenswrapper[5002]: I0930 12:39:26.248089 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"b23ec22e-90c8-41b7-bdc8-4861c7ec9c83","Type":"ContainerStarted","Data":"07a85959d4119add40b7fa627c82be82afc4eca869d90b2ae9adee1e50c283ef"} Sep 30 12:39:26 crc kubenswrapper[5002]: I0930 12:39:26.248149 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"b23ec22e-90c8-41b7-bdc8-4861c7ec9c83","Type":"ContainerStarted","Data":"8522f43ae6027cb6e0fbd468460668db4682d9c645ae511d039a53059ab12116"} Sep 30 12:39:26 crc kubenswrapper[5002]: I0930 12:39:26.249199 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"b6734a59-b52d-4116-bd32-31431b949757","Type":"ContainerStarted","Data":"e3ef3d3927e1004d342e4980369756f1d335bf02c6635986722a090b90ac997d"} Sep 30 12:39:26 crc kubenswrapper[5002]: I0930 12:39:26.272169 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.272141874 podStartE2EDuration="2.272141874s" podCreationTimestamp="2025-09-30 12:39:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 12:39:26.26545264 +0000 UTC m=+1140.515134786" watchObservedRunningTime="2025-09-30 12:39:26.272141874 +0000 UTC m=+1140.521824040" Sep 30 12:39:27 crc kubenswrapper[5002]: I0930 12:39:27.258676 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"b6734a59-b52d-4116-bd32-31431b949757","Type":"ContainerStarted","Data":"90a6887079123a14701fb2744647f89b97e532373afee90676af894f1adb504c"} Sep 30 12:39:27 crc kubenswrapper[5002]: I0930 12:39:27.259174 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-conductor-0" Sep 30 12:39:27 crc kubenswrapper[5002]: I0930 12:39:27.285428 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-0" podStartSLOduration=2.2854126790000002 podStartE2EDuration="2.285412679s" podCreationTimestamp="2025-09-30 12:39:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 12:39:27.278927339 +0000 UTC m=+1141.528609505" watchObservedRunningTime="2025-09-30 12:39:27.285412679 +0000 UTC m=+1141.535094825" Sep 30 12:39:27 crc kubenswrapper[5002]: E0930 12:39:27.533518 5002 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" 
containerID="5590adf8564ec87f88f4ab86615dc85efc1aefc270ae86356444dff4296da921" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Sep 30 12:39:27 crc kubenswrapper[5002]: E0930 12:39:27.535281 5002 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="5590adf8564ec87f88f4ab86615dc85efc1aefc270ae86356444dff4296da921" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Sep 30 12:39:27 crc kubenswrapper[5002]: E0930 12:39:27.536757 5002 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="5590adf8564ec87f88f4ab86615dc85efc1aefc270ae86356444dff4296da921" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Sep 30 12:39:27 crc kubenswrapper[5002]: E0930 12:39:27.536856 5002 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-scheduler-0" podUID="d9c49bf4-915c-4efb-81ed-b8c7a393d371" containerName="nova-scheduler-scheduler" Sep 30 12:39:29 crc kubenswrapper[5002]: I0930 12:39:29.006089 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Sep 30 12:39:29 crc kubenswrapper[5002]: I0930 12:39:29.069436 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2pjf7\" (UniqueName: \"kubernetes.io/projected/d9c49bf4-915c-4efb-81ed-b8c7a393d371-kube-api-access-2pjf7\") pod \"d9c49bf4-915c-4efb-81ed-b8c7a393d371\" (UID: \"d9c49bf4-915c-4efb-81ed-b8c7a393d371\") " Sep 30 12:39:29 crc kubenswrapper[5002]: I0930 12:39:29.069579 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d9c49bf4-915c-4efb-81ed-b8c7a393d371-combined-ca-bundle\") pod \"d9c49bf4-915c-4efb-81ed-b8c7a393d371\" (UID: \"d9c49bf4-915c-4efb-81ed-b8c7a393d371\") " Sep 30 12:39:29 crc kubenswrapper[5002]: I0930 12:39:29.069717 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d9c49bf4-915c-4efb-81ed-b8c7a393d371-config-data\") pod \"d9c49bf4-915c-4efb-81ed-b8c7a393d371\" (UID: \"d9c49bf4-915c-4efb-81ed-b8c7a393d371\") " Sep 30 12:39:29 crc kubenswrapper[5002]: I0930 12:39:29.075576 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d9c49bf4-915c-4efb-81ed-b8c7a393d371-kube-api-access-2pjf7" (OuterVolumeSpecName: "kube-api-access-2pjf7") pod "d9c49bf4-915c-4efb-81ed-b8c7a393d371" (UID: "d9c49bf4-915c-4efb-81ed-b8c7a393d371"). InnerVolumeSpecName "kube-api-access-2pjf7". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 12:39:29 crc kubenswrapper[5002]: I0930 12:39:29.109246 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d9c49bf4-915c-4efb-81ed-b8c7a393d371-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d9c49bf4-915c-4efb-81ed-b8c7a393d371" (UID: "d9c49bf4-915c-4efb-81ed-b8c7a393d371"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:39:29 crc kubenswrapper[5002]: I0930 12:39:29.114931 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d9c49bf4-915c-4efb-81ed-b8c7a393d371-config-data" (OuterVolumeSpecName: "config-data") pod "d9c49bf4-915c-4efb-81ed-b8c7a393d371" (UID: "d9c49bf4-915c-4efb-81ed-b8c7a393d371"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:39:29 crc kubenswrapper[5002]: I0930 12:39:29.172403 5002 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d9c49bf4-915c-4efb-81ed-b8c7a393d371-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 12:39:29 crc kubenswrapper[5002]: I0930 12:39:29.172532 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2pjf7\" (UniqueName: \"kubernetes.io/projected/d9c49bf4-915c-4efb-81ed-b8c7a393d371-kube-api-access-2pjf7\") on node \"crc\" DevicePath \"\"" Sep 30 12:39:29 crc kubenswrapper[5002]: I0930 12:39:29.172544 5002 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d9c49bf4-915c-4efb-81ed-b8c7a393d371-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 12:39:29 crc kubenswrapper[5002]: I0930 12:39:29.237050 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Sep 30 12:39:29 crc kubenswrapper[5002]: I0930 12:39:29.273339 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f6bdc68b-5ea5-468d-bd40-e0c04652a581-config-data\") pod \"f6bdc68b-5ea5-468d-bd40-e0c04652a581\" (UID: \"f6bdc68b-5ea5-468d-bd40-e0c04652a581\") " Sep 30 12:39:29 crc kubenswrapper[5002]: I0930 12:39:29.273431 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r5dgq\" (UniqueName: \"kubernetes.io/projected/f6bdc68b-5ea5-468d-bd40-e0c04652a581-kube-api-access-r5dgq\") pod \"f6bdc68b-5ea5-468d-bd40-e0c04652a581\" (UID: \"f6bdc68b-5ea5-468d-bd40-e0c04652a581\") " Sep 30 12:39:29 crc kubenswrapper[5002]: I0930 12:39:29.273458 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f6bdc68b-5ea5-468d-bd40-e0c04652a581-logs\") pod \"f6bdc68b-5ea5-468d-bd40-e0c04652a581\" (UID: \"f6bdc68b-5ea5-468d-bd40-e0c04652a581\") " Sep 30 12:39:29 crc kubenswrapper[5002]: I0930 12:39:29.273555 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f6bdc68b-5ea5-468d-bd40-e0c04652a581-combined-ca-bundle\") pod \"f6bdc68b-5ea5-468d-bd40-e0c04652a581\" (UID: \"f6bdc68b-5ea5-468d-bd40-e0c04652a581\") " Sep 30 12:39:29 crc kubenswrapper[5002]: I0930 12:39:29.274355 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f6bdc68b-5ea5-468d-bd40-e0c04652a581-logs" (OuterVolumeSpecName: "logs") pod "f6bdc68b-5ea5-468d-bd40-e0c04652a581" (UID: "f6bdc68b-5ea5-468d-bd40-e0c04652a581"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 12:39:29 crc kubenswrapper[5002]: I0930 12:39:29.280209 5002 generic.go:334] "Generic (PLEG): container finished" podID="d9c49bf4-915c-4efb-81ed-b8c7a393d371" containerID="5590adf8564ec87f88f4ab86615dc85efc1aefc270ae86356444dff4296da921" exitCode=0 Sep 30 12:39:29 crc kubenswrapper[5002]: I0930 12:39:29.280285 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"d9c49bf4-915c-4efb-81ed-b8c7a393d371","Type":"ContainerDied","Data":"5590adf8564ec87f88f4ab86615dc85efc1aefc270ae86356444dff4296da921"} Sep 30 12:39:29 crc kubenswrapper[5002]: I0930 12:39:29.280317 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"d9c49bf4-915c-4efb-81ed-b8c7a393d371","Type":"ContainerDied","Data":"697d739af157050a5f833e1d128d8eb7393cfdb66cb46ee01bbe4575d3e11c6f"} Sep 30 12:39:29 crc kubenswrapper[5002]: I0930 12:39:29.280334 5002 scope.go:117] "RemoveContainer" containerID="5590adf8564ec87f88f4ab86615dc85efc1aefc270ae86356444dff4296da921" Sep 30 12:39:29 crc kubenswrapper[5002]: I0930 12:39:29.280386 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Sep 30 12:39:29 crc kubenswrapper[5002]: I0930 12:39:29.283255 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f6bdc68b-5ea5-468d-bd40-e0c04652a581-kube-api-access-r5dgq" (OuterVolumeSpecName: "kube-api-access-r5dgq") pod "f6bdc68b-5ea5-468d-bd40-e0c04652a581" (UID: "f6bdc68b-5ea5-468d-bd40-e0c04652a581"). InnerVolumeSpecName "kube-api-access-r5dgq". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 12:39:29 crc kubenswrapper[5002]: I0930 12:39:29.284259 5002 generic.go:334] "Generic (PLEG): container finished" podID="f6bdc68b-5ea5-468d-bd40-e0c04652a581" containerID="34d2fc4ed30de1144e68bb88be6d1aaed0105f0d96ec1bbf05530afd14bc4427" exitCode=0 Sep 30 12:39:29 crc kubenswrapper[5002]: I0930 12:39:29.284296 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"f6bdc68b-5ea5-468d-bd40-e0c04652a581","Type":"ContainerDied","Data":"34d2fc4ed30de1144e68bb88be6d1aaed0105f0d96ec1bbf05530afd14bc4427"} Sep 30 12:39:29 crc kubenswrapper[5002]: I0930 12:39:29.284328 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"f6bdc68b-5ea5-468d-bd40-e0c04652a581","Type":"ContainerDied","Data":"f7410c791cf59d6a0cb4baefb59141e90799fc06abd4aec490d6774bd152fade"} Sep 30 12:39:29 crc kubenswrapper[5002]: I0930 12:39:29.284396 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Sep 30 12:39:29 crc kubenswrapper[5002]: I0930 12:39:29.303636 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f6bdc68b-5ea5-468d-bd40-e0c04652a581-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f6bdc68b-5ea5-468d-bd40-e0c04652a581" (UID: "f6bdc68b-5ea5-468d-bd40-e0c04652a581"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:39:29 crc kubenswrapper[5002]: I0930 12:39:29.310970 5002 scope.go:117] "RemoveContainer" containerID="5590adf8564ec87f88f4ab86615dc85efc1aefc270ae86356444dff4296da921" Sep 30 12:39:29 crc kubenswrapper[5002]: E0930 12:39:29.313635 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5590adf8564ec87f88f4ab86615dc85efc1aefc270ae86356444dff4296da921\": container with ID starting with 5590adf8564ec87f88f4ab86615dc85efc1aefc270ae86356444dff4296da921 not found: ID does not exist" containerID="5590adf8564ec87f88f4ab86615dc85efc1aefc270ae86356444dff4296da921" Sep 30 12:39:29 crc kubenswrapper[5002]: I0930 12:39:29.313691 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5590adf8564ec87f88f4ab86615dc85efc1aefc270ae86356444dff4296da921"} err="failed to get container status \"5590adf8564ec87f88f4ab86615dc85efc1aefc270ae86356444dff4296da921\": rpc error: code = NotFound desc = could not find container \"5590adf8564ec87f88f4ab86615dc85efc1aefc270ae86356444dff4296da921\": container with ID starting with 5590adf8564ec87f88f4ab86615dc85efc1aefc270ae86356444dff4296da921 not found: ID does not exist" Sep 30 12:39:29 crc kubenswrapper[5002]: I0930 12:39:29.313714 5002 scope.go:117] "RemoveContainer" containerID="34d2fc4ed30de1144e68bb88be6d1aaed0105f0d96ec1bbf05530afd14bc4427" Sep 30 12:39:29 crc kubenswrapper[5002]: I0930 12:39:29.329434 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f6bdc68b-5ea5-468d-bd40-e0c04652a581-config-data" (OuterVolumeSpecName: "config-data") pod "f6bdc68b-5ea5-468d-bd40-e0c04652a581" (UID: "f6bdc68b-5ea5-468d-bd40-e0c04652a581"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:39:29 crc kubenswrapper[5002]: I0930 12:39:29.339904 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Sep 30 12:39:29 crc kubenswrapper[5002]: I0930 12:39:29.344916 5002 scope.go:117] "RemoveContainer" containerID="7b8086bab6889bdc08ab3652a321373821a4c214087e8c8d36de3105d5b111ba" Sep 30 12:39:29 crc kubenswrapper[5002]: I0930 12:39:29.350269 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Sep 30 12:39:29 crc kubenswrapper[5002]: I0930 12:39:29.376373 5002 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f6bdc68b-5ea5-468d-bd40-e0c04652a581-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 12:39:29 crc kubenswrapper[5002]: I0930 12:39:29.376404 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r5dgq\" (UniqueName: \"kubernetes.io/projected/f6bdc68b-5ea5-468d-bd40-e0c04652a581-kube-api-access-r5dgq\") on node \"crc\" DevicePath \"\"" Sep 30 12:39:29 crc kubenswrapper[5002]: I0930 12:39:29.376413 5002 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f6bdc68b-5ea5-468d-bd40-e0c04652a581-logs\") on node \"crc\" DevicePath \"\"" Sep 30 12:39:29 crc kubenswrapper[5002]: I0930 12:39:29.376421 5002 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f6bdc68b-5ea5-468d-bd40-e0c04652a581-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 12:39:29 crc kubenswrapper[5002]: I0930 12:39:29.377016 5002 scope.go:117] "RemoveContainer" containerID="34d2fc4ed30de1144e68bb88be6d1aaed0105f0d96ec1bbf05530afd14bc4427" Sep 30 12:39:29 crc kubenswrapper[5002]: I0930 12:39:29.377238 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Sep 30 12:39:29 crc kubenswrapper[5002]: E0930 12:39:29.377490 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"34d2fc4ed30de1144e68bb88be6d1aaed0105f0d96ec1bbf05530afd14bc4427\": container with ID starting with 34d2fc4ed30de1144e68bb88be6d1aaed0105f0d96ec1bbf05530afd14bc4427 not found: ID does not exist" containerID="34d2fc4ed30de1144e68bb88be6d1aaed0105f0d96ec1bbf05530afd14bc4427" Sep 30 12:39:29 crc kubenswrapper[5002]: I0930 12:39:29.377517 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"34d2fc4ed30de1144e68bb88be6d1aaed0105f0d96ec1bbf05530afd14bc4427"} err="failed to get container status \"34d2fc4ed30de1144e68bb88be6d1aaed0105f0d96ec1bbf05530afd14bc4427\": rpc error: code = NotFound desc = could not find container \"34d2fc4ed30de1144e68bb88be6d1aaed0105f0d96ec1bbf05530afd14bc4427\": container with ID starting with 34d2fc4ed30de1144e68bb88be6d1aaed0105f0d96ec1bbf05530afd14bc4427 not found: ID does not exist" Sep 30 12:39:29 crc kubenswrapper[5002]: I0930 12:39:29.377536 5002 scope.go:117] "RemoveContainer" containerID="7b8086bab6889bdc08ab3652a321373821a4c214087e8c8d36de3105d5b111ba" Sep 30 12:39:29 crc kubenswrapper[5002]: E0930 12:39:29.377700 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d9c49bf4-915c-4efb-81ed-b8c7a393d371" containerName="nova-scheduler-scheduler" Sep 30 12:39:29 crc kubenswrapper[5002]: I0930 12:39:29.377715 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="d9c49bf4-915c-4efb-81ed-b8c7a393d371" 
containerName="nova-scheduler-scheduler" Sep 30 12:39:29 crc kubenswrapper[5002]: E0930 12:39:29.377723 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f6bdc68b-5ea5-468d-bd40-e0c04652a581" containerName="nova-api-api" Sep 30 12:39:29 crc kubenswrapper[5002]: I0930 12:39:29.377729 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="f6bdc68b-5ea5-468d-bd40-e0c04652a581" containerName="nova-api-api" Sep 30 12:39:29 crc kubenswrapper[5002]: E0930 12:39:29.377755 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f6bdc68b-5ea5-468d-bd40-e0c04652a581" containerName="nova-api-log" Sep 30 12:39:29 crc kubenswrapper[5002]: I0930 12:39:29.377761 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="f6bdc68b-5ea5-468d-bd40-e0c04652a581" containerName="nova-api-log" Sep 30 12:39:29 crc kubenswrapper[5002]: I0930 12:39:29.377941 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="f6bdc68b-5ea5-468d-bd40-e0c04652a581" containerName="nova-api-api" Sep 30 12:39:29 crc kubenswrapper[5002]: I0930 12:39:29.377963 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="d9c49bf4-915c-4efb-81ed-b8c7a393d371" containerName="nova-scheduler-scheduler" Sep 30 12:39:29 crc kubenswrapper[5002]: I0930 12:39:29.377972 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="f6bdc68b-5ea5-468d-bd40-e0c04652a581" containerName="nova-api-log" Sep 30 12:39:29 crc kubenswrapper[5002]: E0930 12:39:29.377981 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7b8086bab6889bdc08ab3652a321373821a4c214087e8c8d36de3105d5b111ba\": container with ID starting with 7b8086bab6889bdc08ab3652a321373821a4c214087e8c8d36de3105d5b111ba not found: ID does not exist" containerID="7b8086bab6889bdc08ab3652a321373821a4c214087e8c8d36de3105d5b111ba" Sep 30 12:39:29 crc kubenswrapper[5002]: I0930 12:39:29.378001 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7b8086bab6889bdc08ab3652a321373821a4c214087e8c8d36de3105d5b111ba"} err="failed to get container status \"7b8086bab6889bdc08ab3652a321373821a4c214087e8c8d36de3105d5b111ba\": rpc error: code = NotFound desc = could not find container \"7b8086bab6889bdc08ab3652a321373821a4c214087e8c8d36de3105d5b111ba\": container with ID starting with 7b8086bab6889bdc08ab3652a321373821a4c214087e8c8d36de3105d5b111ba not found: ID does not exist" Sep 30 12:39:29 crc kubenswrapper[5002]: I0930 12:39:29.378678 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Sep 30 12:39:29 crc kubenswrapper[5002]: I0930 12:39:29.383445 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Sep 30 12:39:29 crc kubenswrapper[5002]: I0930 12:39:29.387074 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Sep 30 12:39:29 crc kubenswrapper[5002]: I0930 12:39:29.478138 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/41c5acb5-451a-46fe-8384-be7514e49ea9-config-data\") pod \"nova-scheduler-0\" (UID: \"41c5acb5-451a-46fe-8384-be7514e49ea9\") " pod="openstack/nova-scheduler-0" Sep 30 12:39:29 crc kubenswrapper[5002]: I0930 12:39:29.478268 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/41c5acb5-451a-46fe-8384-be7514e49ea9-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"41c5acb5-451a-46fe-8384-be7514e49ea9\") " pod="openstack/nova-scheduler-0" Sep 30 12:39:29 crc kubenswrapper[5002]: I0930 12:39:29.478744 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9cpg7\" (UniqueName: \"kubernetes.io/projected/41c5acb5-451a-46fe-8384-be7514e49ea9-kube-api-access-9cpg7\") pod \"nova-scheduler-0\" (UID: \"41c5acb5-451a-46fe-8384-be7514e49ea9\") " pod="openstack/nova-scheduler-0" Sep 30 12:39:29 crc kubenswrapper[5002]: I0930 12:39:29.580980 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9cpg7\" (UniqueName: \"kubernetes.io/projected/41c5acb5-451a-46fe-8384-be7514e49ea9-kube-api-access-9cpg7\") pod \"nova-scheduler-0\" (UID: \"41c5acb5-451a-46fe-8384-be7514e49ea9\") " pod="openstack/nova-scheduler-0" Sep 30 12:39:29 crc kubenswrapper[5002]: I0930 12:39:29.581039 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/41c5acb5-451a-46fe-8384-be7514e49ea9-config-data\") pod \"nova-scheduler-0\" (UID: \"41c5acb5-451a-46fe-8384-be7514e49ea9\") " pod="openstack/nova-scheduler-0" Sep 30 12:39:29 crc kubenswrapper[5002]: I0930 12:39:29.581105 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/41c5acb5-451a-46fe-8384-be7514e49ea9-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"41c5acb5-451a-46fe-8384-be7514e49ea9\") " pod="openstack/nova-scheduler-0" Sep 30 12:39:29 crc kubenswrapper[5002]: I0930 12:39:29.585202 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/41c5acb5-451a-46fe-8384-be7514e49ea9-config-data\") pod \"nova-scheduler-0\" (UID: \"41c5acb5-451a-46fe-8384-be7514e49ea9\") " pod="openstack/nova-scheduler-0" Sep 30 12:39:29 crc kubenswrapper[5002]: I0930 12:39:29.585444 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/41c5acb5-451a-46fe-8384-be7514e49ea9-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"41c5acb5-451a-46fe-8384-be7514e49ea9\") " pod="openstack/nova-scheduler-0" Sep 30 12:39:29 crc kubenswrapper[5002]: I0930 12:39:29.603596 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9cpg7\" (UniqueName: 
\"kubernetes.io/projected/41c5acb5-451a-46fe-8384-be7514e49ea9-kube-api-access-9cpg7\") pod \"nova-scheduler-0\" (UID: \"41c5acb5-451a-46fe-8384-be7514e49ea9\") " pod="openstack/nova-scheduler-0" Sep 30 12:39:29 crc kubenswrapper[5002]: I0930 12:39:29.630703 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Sep 30 12:39:29 crc kubenswrapper[5002]: I0930 12:39:29.631730 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Sep 30 12:39:29 crc kubenswrapper[5002]: I0930 12:39:29.680943 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Sep 30 12:39:29 crc kubenswrapper[5002]: I0930 12:39:29.690048 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Sep 30 12:39:29 crc kubenswrapper[5002]: I0930 12:39:29.697966 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Sep 30 12:39:29 crc kubenswrapper[5002]: I0930 12:39:29.730428 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Sep 30 12:39:29 crc kubenswrapper[5002]: I0930 12:39:29.748376 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Sep 30 12:39:29 crc kubenswrapper[5002]: I0930 12:39:29.748539 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Sep 30 12:39:29 crc kubenswrapper[5002]: I0930 12:39:29.751490 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Sep 30 12:39:29 crc kubenswrapper[5002]: I0930 12:39:29.785405 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e6cd1979-da09-4b1b-afa5-b8a10085918c-config-data\") pod \"nova-api-0\" (UID: \"e6cd1979-da09-4b1b-afa5-b8a10085918c\") " pod="openstack/nova-api-0" Sep 30 12:39:29 crc kubenswrapper[5002]: I0930 12:39:29.786460 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hbktc\" (UniqueName: \"kubernetes.io/projected/e6cd1979-da09-4b1b-afa5-b8a10085918c-kube-api-access-hbktc\") pod \"nova-api-0\" (UID: \"e6cd1979-da09-4b1b-afa5-b8a10085918c\") " pod="openstack/nova-api-0" Sep 30 12:39:29 crc kubenswrapper[5002]: I0930 12:39:29.786507 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e6cd1979-da09-4b1b-afa5-b8a10085918c-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"e6cd1979-da09-4b1b-afa5-b8a10085918c\") " pod="openstack/nova-api-0" Sep 30 12:39:29 crc kubenswrapper[5002]: I0930 12:39:29.786651 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e6cd1979-da09-4b1b-afa5-b8a10085918c-logs\") pod \"nova-api-0\" (UID: \"e6cd1979-da09-4b1b-afa5-b8a10085918c\") " pod="openstack/nova-api-0" Sep 30 12:39:29 crc kubenswrapper[5002]: I0930 12:39:29.888450 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hbktc\" (UniqueName: \"kubernetes.io/projected/e6cd1979-da09-4b1b-afa5-b8a10085918c-kube-api-access-hbktc\") pod \"nova-api-0\" (UID: \"e6cd1979-da09-4b1b-afa5-b8a10085918c\") " pod="openstack/nova-api-0" Sep 30 12:39:29 crc kubenswrapper[5002]: I0930 12:39:29.888555 5002 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e6cd1979-da09-4b1b-afa5-b8a10085918c-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"e6cd1979-da09-4b1b-afa5-b8a10085918c\") " pod="openstack/nova-api-0" Sep 30 12:39:29 crc kubenswrapper[5002]: I0930 12:39:29.888685 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e6cd1979-da09-4b1b-afa5-b8a10085918c-logs\") pod \"nova-api-0\" (UID: \"e6cd1979-da09-4b1b-afa5-b8a10085918c\") " pod="openstack/nova-api-0" Sep 30 12:39:29 crc kubenswrapper[5002]: I0930 12:39:29.888757 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e6cd1979-da09-4b1b-afa5-b8a10085918c-config-data\") pod \"nova-api-0\" (UID: \"e6cd1979-da09-4b1b-afa5-b8a10085918c\") " pod="openstack/nova-api-0" Sep 30 12:39:29 crc kubenswrapper[5002]: I0930 12:39:29.889300 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e6cd1979-da09-4b1b-afa5-b8a10085918c-logs\") pod \"nova-api-0\" (UID: \"e6cd1979-da09-4b1b-afa5-b8a10085918c\") " pod="openstack/nova-api-0" Sep 30 12:39:29 crc kubenswrapper[5002]: I0930 12:39:29.893651 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e6cd1979-da09-4b1b-afa5-b8a10085918c-config-data\") pod \"nova-api-0\" (UID: \"e6cd1979-da09-4b1b-afa5-b8a10085918c\") " pod="openstack/nova-api-0" Sep 30 12:39:29 crc kubenswrapper[5002]: I0930 12:39:29.907298 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e6cd1979-da09-4b1b-afa5-b8a10085918c-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"e6cd1979-da09-4b1b-afa5-b8a10085918c\") " pod="openstack/nova-api-0" Sep 30 12:39:29 crc kubenswrapper[5002]: I0930 12:39:29.910498 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hbktc\" (UniqueName: \"kubernetes.io/projected/e6cd1979-da09-4b1b-afa5-b8a10085918c-kube-api-access-hbktc\") pod \"nova-api-0\" (UID: \"e6cd1979-da09-4b1b-afa5-b8a10085918c\") " pod="openstack/nova-api-0" Sep 30 12:39:30 crc kubenswrapper[5002]: I0930 12:39:30.140175 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Sep 30 12:39:30 crc kubenswrapper[5002]: I0930 12:39:30.176990 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Sep 30 12:39:30 crc kubenswrapper[5002]: W0930 12:39:30.187889 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod41c5acb5_451a_46fe_8384_be7514e49ea9.slice/crio-50bf6de3989792d742c856199b54dded5f311d34b1d18e2595320653b057dc7d WatchSource:0}: Error finding container 50bf6de3989792d742c856199b54dded5f311d34b1d18e2595320653b057dc7d: Status 404 returned error can't find the container with id 50bf6de3989792d742c856199b54dded5f311d34b1d18e2595320653b057dc7d Sep 30 12:39:30 crc kubenswrapper[5002]: I0930 12:39:30.301047 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"41c5acb5-451a-46fe-8384-be7514e49ea9","Type":"ContainerStarted","Data":"50bf6de3989792d742c856199b54dded5f311d34b1d18e2595320653b057dc7d"} Sep 30 12:39:30 crc kubenswrapper[5002]: I0930 12:39:30.610669 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Sep 30 12:39:30 crc kubenswrapper[5002]: I0930 12:39:30.690221 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d9c49bf4-915c-4efb-81ed-b8c7a393d371" path="/var/lib/kubelet/pods/d9c49bf4-915c-4efb-81ed-b8c7a393d371/volumes" Sep 30 12:39:30 crc kubenswrapper[5002]: I0930 12:39:30.690989 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f6bdc68b-5ea5-468d-bd40-e0c04652a581" path="/var/lib/kubelet/pods/f6bdc68b-5ea5-468d-bd40-e0c04652a581/volumes" Sep 30 12:39:31 crc kubenswrapper[5002]: I0930 12:39:31.263718 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Sep 30 12:39:31 crc kubenswrapper[5002]: I0930 12:39:31.325629 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"41c5acb5-451a-46fe-8384-be7514e49ea9","Type":"ContainerStarted","Data":"ea585a3f09929f61b29d55fd2e68fe5bdb058d85d58893f6c8db15ac0460e776"} Sep 30 12:39:31 crc kubenswrapper[5002]: I0930 12:39:31.333392 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"e6cd1979-da09-4b1b-afa5-b8a10085918c","Type":"ContainerStarted","Data":"21360592d52e32cbc823f9b85082078f08331f6f682522de8ef6ea0e02afd5c8"} Sep 30 12:39:31 crc kubenswrapper[5002]: I0930 12:39:31.333432 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"e6cd1979-da09-4b1b-afa5-b8a10085918c","Type":"ContainerStarted","Data":"af68c7c8f22bd4d0756157902404dcece595bc7791982f00a673cd24d69bb23f"} Sep 30 12:39:31 crc kubenswrapper[5002]: I0930 12:39:31.333442 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"e6cd1979-da09-4b1b-afa5-b8a10085918c","Type":"ContainerStarted","Data":"ad7f28d285b17fac9f094e9fa038ecaf959217c66931dbedc3d0aae72e2e47e7"} Sep 30 12:39:31 crc kubenswrapper[5002]: I0930 12:39:31.375511 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.375462765 podStartE2EDuration="2.375462765s" podCreationTimestamp="2025-09-30 12:39:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 12:39:31.347178224 +0000 UTC m=+1145.596860380" 
watchObservedRunningTime="2025-09-30 12:39:31.375462765 +0000 UTC m=+1145.625144911" Sep 30 12:39:31 crc kubenswrapper[5002]: I0930 12:39:31.376088 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.376082492 podStartE2EDuration="2.376082492s" podCreationTimestamp="2025-09-30 12:39:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 12:39:31.367545886 +0000 UTC m=+1145.617228042" watchObservedRunningTime="2025-09-30 12:39:31.376082492 +0000 UTC m=+1145.625764638" Sep 30 12:39:34 crc kubenswrapper[5002]: I0930 12:39:34.630502 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Sep 30 12:39:34 crc kubenswrapper[5002]: I0930 12:39:34.630983 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Sep 30 12:39:34 crc kubenswrapper[5002]: I0930 12:39:34.700697 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Sep 30 12:39:34 crc kubenswrapper[5002]: I0930 12:39:34.955328 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"] Sep 30 12:39:34 crc kubenswrapper[5002]: I0930 12:39:34.955572 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/kube-state-metrics-0" podUID="0fe7ac7f-c7a0-42a5-8089-059c05c40d3e" containerName="kube-state-metrics" containerID="cri-o://8a2465dc4997baab7ca676ac6c30236214abccdb4bd8a14646dcde7c78dc5cfd" gracePeriod=30 Sep 30 12:39:35 crc kubenswrapper[5002]: I0930 12:39:35.370013 5002 generic.go:334] "Generic (PLEG): container finished" podID="0fe7ac7f-c7a0-42a5-8089-059c05c40d3e" containerID="8a2465dc4997baab7ca676ac6c30236214abccdb4bd8a14646dcde7c78dc5cfd" exitCode=2 Sep 30 12:39:35 crc kubenswrapper[5002]: I0930 12:39:35.370112 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"0fe7ac7f-c7a0-42a5-8089-059c05c40d3e","Type":"ContainerDied","Data":"8a2465dc4997baab7ca676ac6c30236214abccdb4bd8a14646dcde7c78dc5cfd"} Sep 30 12:39:35 crc kubenswrapper[5002]: I0930 12:39:35.592992 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/kube-state-metrics-0" Sep 30 12:39:35 crc kubenswrapper[5002]: I0930 12:39:35.680158 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-conductor-0" Sep 30 12:39:35 crc kubenswrapper[5002]: I0930 12:39:35.684653 5002 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="b23ec22e-90c8-41b7-bdc8-4861c7ec9c83" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.195:8775/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Sep 30 12:39:35 crc kubenswrapper[5002]: I0930 12:39:35.684656 5002 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="b23ec22e-90c8-41b7-bdc8-4861c7ec9c83" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.195:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Sep 30 12:39:35 crc kubenswrapper[5002]: I0930 12:39:35.793012 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z6l25\" (UniqueName: \"kubernetes.io/projected/0fe7ac7f-c7a0-42a5-8089-059c05c40d3e-kube-api-access-z6l25\") pod \"0fe7ac7f-c7a0-42a5-8089-059c05c40d3e\" (UID: \"0fe7ac7f-c7a0-42a5-8089-059c05c40d3e\") " Sep 30 12:39:35 crc kubenswrapper[5002]: I0930 12:39:35.799523 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0fe7ac7f-c7a0-42a5-8089-059c05c40d3e-kube-api-access-z6l25" (OuterVolumeSpecName: "kube-api-access-z6l25") pod "0fe7ac7f-c7a0-42a5-8089-059c05c40d3e" (UID: "0fe7ac7f-c7a0-42a5-8089-059c05c40d3e"). InnerVolumeSpecName "kube-api-access-z6l25". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 12:39:35 crc kubenswrapper[5002]: I0930 12:39:35.897302 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z6l25\" (UniqueName: \"kubernetes.io/projected/0fe7ac7f-c7a0-42a5-8089-059c05c40d3e-kube-api-access-z6l25\") on node \"crc\" DevicePath \"\"" Sep 30 12:39:36 crc kubenswrapper[5002]: I0930 12:39:36.381120 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"0fe7ac7f-c7a0-42a5-8089-059c05c40d3e","Type":"ContainerDied","Data":"fcbc28edd5b28a6f6278ff654010ab87dba77027aacd53e6eba9deb13a7f08d2"} Sep 30 12:39:36 crc kubenswrapper[5002]: I0930 12:39:36.381233 5002 scope.go:117] "RemoveContainer" containerID="8a2465dc4997baab7ca676ac6c30236214abccdb4bd8a14646dcde7c78dc5cfd" Sep 30 12:39:36 crc kubenswrapper[5002]: I0930 12:39:36.381684 5002 util.go:48] "No ready sandbox for pod can be found. 
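The two startup-probe failures above are plain HTTPS GETs against the pod IP that hit the client timeout before any response headers arrive. A sketch of one probe attempt; the one-second timeout and the skip-verify TLS config are illustrative assumptions, not the pod's actual probe spec:

package main

import (
	"crypto/tls"
	"fmt"
	"net/http"
	"time"
)

// probeOnce performs a single HTTPS GET with a hard client timeout; any
// transport error or non-2xx/3xx status marks the attempt as a failure,
// which is what surfaces as "Probe failed ... Client.Timeout exceeded".
func probeOnce(url string) error {
	client := &http.Client{
		Timeout: 1 * time.Second, // assumed value for illustration
		Transport: &http.Transport{
			// The probe hits the pod IP directly, so the serving cert
			// will not match the address; skip verification here.
			TLSClientConfig: &tls.Config{InsecureSkipVerify: true},
		},
	}
	resp, err := client.Get(url)
	if err != nil {
		return err // e.g. context deadline exceeded while awaiting headers
	}
	defer resp.Body.Close()
	if resp.StatusCode < 200 || resp.StatusCode >= 400 {
		return fmt.Errorf("unexpected status %d", resp.StatusCode)
	}
	return nil
}

func main() {
	fmt.Println(probeOnce("https://10.217.0.195:8775/"))
}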
Need to start a new one" pod="openstack/kube-state-metrics-0" Sep 30 12:39:36 crc kubenswrapper[5002]: I0930 12:39:36.432530 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"] Sep 30 12:39:36 crc kubenswrapper[5002]: I0930 12:39:36.441612 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/kube-state-metrics-0"] Sep 30 12:39:36 crc kubenswrapper[5002]: I0930 12:39:36.458866 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/kube-state-metrics-0"] Sep 30 12:39:36 crc kubenswrapper[5002]: E0930 12:39:36.459351 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0fe7ac7f-c7a0-42a5-8089-059c05c40d3e" containerName="kube-state-metrics" Sep 30 12:39:36 crc kubenswrapper[5002]: I0930 12:39:36.459374 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="0fe7ac7f-c7a0-42a5-8089-059c05c40d3e" containerName="kube-state-metrics" Sep 30 12:39:36 crc kubenswrapper[5002]: I0930 12:39:36.459661 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="0fe7ac7f-c7a0-42a5-8089-059c05c40d3e" containerName="kube-state-metrics" Sep 30 12:39:36 crc kubenswrapper[5002]: I0930 12:39:36.460397 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Sep 30 12:39:36 crc kubenswrapper[5002]: I0930 12:39:36.466960 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"kube-state-metrics-tls-config" Sep 30 12:39:36 crc kubenswrapper[5002]: I0930 12:39:36.467251 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-kube-state-metrics-svc" Sep 30 12:39:36 crc kubenswrapper[5002]: I0930 12:39:36.490883 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Sep 30 12:39:36 crc kubenswrapper[5002]: I0930 12:39:36.610956 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/16e6726b-1f13-4bd9-a6a0-326e726dd86a-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"16e6726b-1f13-4bd9-a6a0-326e726dd86a\") " pod="openstack/kube-state-metrics-0" Sep 30 12:39:36 crc kubenswrapper[5002]: I0930 12:39:36.611464 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gwcfw\" (UniqueName: \"kubernetes.io/projected/16e6726b-1f13-4bd9-a6a0-326e726dd86a-kube-api-access-gwcfw\") pod \"kube-state-metrics-0\" (UID: \"16e6726b-1f13-4bd9-a6a0-326e726dd86a\") " pod="openstack/kube-state-metrics-0" Sep 30 12:39:36 crc kubenswrapper[5002]: I0930 12:39:36.611726 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/16e6726b-1f13-4bd9-a6a0-326e726dd86a-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"16e6726b-1f13-4bd9-a6a0-326e726dd86a\") " pod="openstack/kube-state-metrics-0" Sep 30 12:39:36 crc kubenswrapper[5002]: I0930 12:39:36.612096 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/16e6726b-1f13-4bd9-a6a0-326e726dd86a-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"16e6726b-1f13-4bd9-a6a0-326e726dd86a\") " pod="openstack/kube-state-metrics-0" Sep 30 12:39:36 crc kubenswrapper[5002]: I0930 12:39:36.698968 5002 kubelet_volumes.go:163] "Cleaned 
up orphaned pod volumes dir" podUID="0fe7ac7f-c7a0-42a5-8089-059c05c40d3e" path="/var/lib/kubelet/pods/0fe7ac7f-c7a0-42a5-8089-059c05c40d3e/volumes" Sep 30 12:39:36 crc kubenswrapper[5002]: I0930 12:39:36.700713 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Sep 30 12:39:36 crc kubenswrapper[5002]: I0930 12:39:36.701061 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="f36c00a3-1b8e-4f69-8690-7f91b49eb8e1" containerName="ceilometer-central-agent" containerID="cri-o://5e3451bb7d4e6357b90125dad5f9bd0db99e24a04ca4dd761c012c386e5e12fa" gracePeriod=30 Sep 30 12:39:36 crc kubenswrapper[5002]: I0930 12:39:36.701793 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="f36c00a3-1b8e-4f69-8690-7f91b49eb8e1" containerName="proxy-httpd" containerID="cri-o://3a1c14dcb593b6a010604ddc317205c2511f3c4513f3967c31e4515dde283f42" gracePeriod=30 Sep 30 12:39:36 crc kubenswrapper[5002]: I0930 12:39:36.702336 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="f36c00a3-1b8e-4f69-8690-7f91b49eb8e1" containerName="sg-core" containerID="cri-o://ceb447eb0439373b34f517918bce85b8fca65d503059fe5dac95e6792c06a058" gracePeriod=30 Sep 30 12:39:36 crc kubenswrapper[5002]: I0930 12:39:36.702412 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="f36c00a3-1b8e-4f69-8690-7f91b49eb8e1" containerName="ceilometer-notification-agent" containerID="cri-o://9f4340c61476e7e180916ae426077ec083f5f78303aab7b1bea55eb501713d9c" gracePeriod=30 Sep 30 12:39:36 crc kubenswrapper[5002]: I0930 12:39:36.713978 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/16e6726b-1f13-4bd9-a6a0-326e726dd86a-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"16e6726b-1f13-4bd9-a6a0-326e726dd86a\") " pod="openstack/kube-state-metrics-0" Sep 30 12:39:36 crc kubenswrapper[5002]: I0930 12:39:36.714064 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/16e6726b-1f13-4bd9-a6a0-326e726dd86a-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"16e6726b-1f13-4bd9-a6a0-326e726dd86a\") " pod="openstack/kube-state-metrics-0" Sep 30 12:39:36 crc kubenswrapper[5002]: I0930 12:39:36.714112 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gwcfw\" (UniqueName: \"kubernetes.io/projected/16e6726b-1f13-4bd9-a6a0-326e726dd86a-kube-api-access-gwcfw\") pod \"kube-state-metrics-0\" (UID: \"16e6726b-1f13-4bd9-a6a0-326e726dd86a\") " pod="openstack/kube-state-metrics-0" Sep 30 12:39:36 crc kubenswrapper[5002]: I0930 12:39:36.714139 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/16e6726b-1f13-4bd9-a6a0-326e726dd86a-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"16e6726b-1f13-4bd9-a6a0-326e726dd86a\") " pod="openstack/kube-state-metrics-0" Sep 30 12:39:36 crc kubenswrapper[5002]: I0930 12:39:36.719547 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/16e6726b-1f13-4bd9-a6a0-326e726dd86a-kube-state-metrics-tls-config\") pod 
\"kube-state-metrics-0\" (UID: \"16e6726b-1f13-4bd9-a6a0-326e726dd86a\") " pod="openstack/kube-state-metrics-0" Sep 30 12:39:36 crc kubenswrapper[5002]: I0930 12:39:36.719842 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/16e6726b-1f13-4bd9-a6a0-326e726dd86a-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"16e6726b-1f13-4bd9-a6a0-326e726dd86a\") " pod="openstack/kube-state-metrics-0" Sep 30 12:39:36 crc kubenswrapper[5002]: I0930 12:39:36.721063 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/16e6726b-1f13-4bd9-a6a0-326e726dd86a-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"16e6726b-1f13-4bd9-a6a0-326e726dd86a\") " pod="openstack/kube-state-metrics-0" Sep 30 12:39:36 crc kubenswrapper[5002]: I0930 12:39:36.732586 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gwcfw\" (UniqueName: \"kubernetes.io/projected/16e6726b-1f13-4bd9-a6a0-326e726dd86a-kube-api-access-gwcfw\") pod \"kube-state-metrics-0\" (UID: \"16e6726b-1f13-4bd9-a6a0-326e726dd86a\") " pod="openstack/kube-state-metrics-0" Sep 30 12:39:36 crc kubenswrapper[5002]: I0930 12:39:36.786636 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Sep 30 12:39:37 crc kubenswrapper[5002]: I0930 12:39:37.390137 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Sep 30 12:39:37 crc kubenswrapper[5002]: W0930 12:39:37.393179 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod16e6726b_1f13_4bd9_a6a0_326e726dd86a.slice/crio-b068f09d132da6c1d4e6186ea2311927ada0f847dc54d2d92106c359d5024ad4 WatchSource:0}: Error finding container b068f09d132da6c1d4e6186ea2311927ada0f847dc54d2d92106c359d5024ad4: Status 404 returned error can't find the container with id b068f09d132da6c1d4e6186ea2311927ada0f847dc54d2d92106c359d5024ad4 Sep 30 12:39:37 crc kubenswrapper[5002]: I0930 12:39:37.395668 5002 generic.go:334] "Generic (PLEG): container finished" podID="f36c00a3-1b8e-4f69-8690-7f91b49eb8e1" containerID="3a1c14dcb593b6a010604ddc317205c2511f3c4513f3967c31e4515dde283f42" exitCode=0 Sep 30 12:39:37 crc kubenswrapper[5002]: I0930 12:39:37.395700 5002 generic.go:334] "Generic (PLEG): container finished" podID="f36c00a3-1b8e-4f69-8690-7f91b49eb8e1" containerID="ceb447eb0439373b34f517918bce85b8fca65d503059fe5dac95e6792c06a058" exitCode=2 Sep 30 12:39:37 crc kubenswrapper[5002]: I0930 12:39:37.395709 5002 generic.go:334] "Generic (PLEG): container finished" podID="f36c00a3-1b8e-4f69-8690-7f91b49eb8e1" containerID="5e3451bb7d4e6357b90125dad5f9bd0db99e24a04ca4dd761c012c386e5e12fa" exitCode=0 Sep 30 12:39:37 crc kubenswrapper[5002]: I0930 12:39:37.395760 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f36c00a3-1b8e-4f69-8690-7f91b49eb8e1","Type":"ContainerDied","Data":"3a1c14dcb593b6a010604ddc317205c2511f3c4513f3967c31e4515dde283f42"} Sep 30 12:39:37 crc kubenswrapper[5002]: I0930 12:39:37.395793 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f36c00a3-1b8e-4f69-8690-7f91b49eb8e1","Type":"ContainerDied","Data":"ceb447eb0439373b34f517918bce85b8fca65d503059fe5dac95e6792c06a058"} Sep 30 12:39:37 crc kubenswrapper[5002]: I0930 12:39:37.395808 5002 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f36c00a3-1b8e-4f69-8690-7f91b49eb8e1","Type":"ContainerDied","Data":"5e3451bb7d4e6357b90125dad5f9bd0db99e24a04ca4dd761c012c386e5e12fa"} Sep 30 12:39:38 crc kubenswrapper[5002]: I0930 12:39:38.411069 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"16e6726b-1f13-4bd9-a6a0-326e726dd86a","Type":"ContainerStarted","Data":"d8eabaf3048ba67c5d389b4174bdc5f1b3190bf4f3f91abdaba38bd38986a98e"} Sep 30 12:39:38 crc kubenswrapper[5002]: I0930 12:39:38.411728 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"16e6726b-1f13-4bd9-a6a0-326e726dd86a","Type":"ContainerStarted","Data":"b068f09d132da6c1d4e6186ea2311927ada0f847dc54d2d92106c359d5024ad4"} Sep 30 12:39:38 crc kubenswrapper[5002]: I0930 12:39:38.411800 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/kube-state-metrics-0" Sep 30 12:39:38 crc kubenswrapper[5002]: I0930 12:39:38.447282 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/kube-state-metrics-0" podStartSLOduration=2.034391212 podStartE2EDuration="2.44725869s" podCreationTimestamp="2025-09-30 12:39:36 +0000 UTC" firstStartedPulling="2025-09-30 12:39:37.394871806 +0000 UTC m=+1151.644553952" lastFinishedPulling="2025-09-30 12:39:37.807739284 +0000 UTC m=+1152.057421430" observedRunningTime="2025-09-30 12:39:38.431995359 +0000 UTC m=+1152.681677515" watchObservedRunningTime="2025-09-30 12:39:38.44725869 +0000 UTC m=+1152.696940836" Sep 30 12:39:39 crc kubenswrapper[5002]: I0930 12:39:39.698563 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Sep 30 12:39:39 crc kubenswrapper[5002]: I0930 12:39:39.740662 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Sep 30 12:39:40 crc kubenswrapper[5002]: I0930 12:39:40.140682 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Sep 30 12:39:40 crc kubenswrapper[5002]: I0930 12:39:40.140749 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Sep 30 12:39:40 crc kubenswrapper[5002]: I0930 12:39:40.459069 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Sep 30 12:39:40 crc kubenswrapper[5002]: I0930 12:39:40.891303 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Sep 30 12:39:41 crc kubenswrapper[5002]: I0930 12:39:41.001928 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f36c00a3-1b8e-4f69-8690-7f91b49eb8e1-config-data\") pod \"f36c00a3-1b8e-4f69-8690-7f91b49eb8e1\" (UID: \"f36c00a3-1b8e-4f69-8690-7f91b49eb8e1\") " Sep 30 12:39:41 crc kubenswrapper[5002]: I0930 12:39:41.002005 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/f36c00a3-1b8e-4f69-8690-7f91b49eb8e1-sg-core-conf-yaml\") pod \"f36c00a3-1b8e-4f69-8690-7f91b49eb8e1\" (UID: \"f36c00a3-1b8e-4f69-8690-7f91b49eb8e1\") " Sep 30 12:39:41 crc kubenswrapper[5002]: I0930 12:39:41.002038 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f36c00a3-1b8e-4f69-8690-7f91b49eb8e1-run-httpd\") pod \"f36c00a3-1b8e-4f69-8690-7f91b49eb8e1\" (UID: \"f36c00a3-1b8e-4f69-8690-7f91b49eb8e1\") " Sep 30 12:39:41 crc kubenswrapper[5002]: I0930 12:39:41.002097 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f36c00a3-1b8e-4f69-8690-7f91b49eb8e1-log-httpd\") pod \"f36c00a3-1b8e-4f69-8690-7f91b49eb8e1\" (UID: \"f36c00a3-1b8e-4f69-8690-7f91b49eb8e1\") " Sep 30 12:39:41 crc kubenswrapper[5002]: I0930 12:39:41.002138 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-frjjk\" (UniqueName: \"kubernetes.io/projected/f36c00a3-1b8e-4f69-8690-7f91b49eb8e1-kube-api-access-frjjk\") pod \"f36c00a3-1b8e-4f69-8690-7f91b49eb8e1\" (UID: \"f36c00a3-1b8e-4f69-8690-7f91b49eb8e1\") " Sep 30 12:39:41 crc kubenswrapper[5002]: I0930 12:39:41.002234 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f36c00a3-1b8e-4f69-8690-7f91b49eb8e1-scripts\") pod \"f36c00a3-1b8e-4f69-8690-7f91b49eb8e1\" (UID: \"f36c00a3-1b8e-4f69-8690-7f91b49eb8e1\") " Sep 30 12:39:41 crc kubenswrapper[5002]: I0930 12:39:41.002338 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f36c00a3-1b8e-4f69-8690-7f91b49eb8e1-combined-ca-bundle\") pod \"f36c00a3-1b8e-4f69-8690-7f91b49eb8e1\" (UID: \"f36c00a3-1b8e-4f69-8690-7f91b49eb8e1\") " Sep 30 12:39:41 crc kubenswrapper[5002]: I0930 12:39:41.003220 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f36c00a3-1b8e-4f69-8690-7f91b49eb8e1-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "f36c00a3-1b8e-4f69-8690-7f91b49eb8e1" (UID: "f36c00a3-1b8e-4f69-8690-7f91b49eb8e1"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 12:39:41 crc kubenswrapper[5002]: I0930 12:39:41.003833 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f36c00a3-1b8e-4f69-8690-7f91b49eb8e1-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "f36c00a3-1b8e-4f69-8690-7f91b49eb8e1" (UID: "f36c00a3-1b8e-4f69-8690-7f91b49eb8e1"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 12:39:41 crc kubenswrapper[5002]: I0930 12:39:41.008743 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f36c00a3-1b8e-4f69-8690-7f91b49eb8e1-scripts" (OuterVolumeSpecName: "scripts") pod "f36c00a3-1b8e-4f69-8690-7f91b49eb8e1" (UID: "f36c00a3-1b8e-4f69-8690-7f91b49eb8e1"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:39:41 crc kubenswrapper[5002]: I0930 12:39:41.027937 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f36c00a3-1b8e-4f69-8690-7f91b49eb8e1-kube-api-access-frjjk" (OuterVolumeSpecName: "kube-api-access-frjjk") pod "f36c00a3-1b8e-4f69-8690-7f91b49eb8e1" (UID: "f36c00a3-1b8e-4f69-8690-7f91b49eb8e1"). InnerVolumeSpecName "kube-api-access-frjjk". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 12:39:41 crc kubenswrapper[5002]: I0930 12:39:41.061772 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f36c00a3-1b8e-4f69-8690-7f91b49eb8e1-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "f36c00a3-1b8e-4f69-8690-7f91b49eb8e1" (UID: "f36c00a3-1b8e-4f69-8690-7f91b49eb8e1"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:39:41 crc kubenswrapper[5002]: I0930 12:39:41.093966 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f36c00a3-1b8e-4f69-8690-7f91b49eb8e1-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f36c00a3-1b8e-4f69-8690-7f91b49eb8e1" (UID: "f36c00a3-1b8e-4f69-8690-7f91b49eb8e1"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:39:41 crc kubenswrapper[5002]: I0930 12:39:41.105136 5002 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/f36c00a3-1b8e-4f69-8690-7f91b49eb8e1-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Sep 30 12:39:41 crc kubenswrapper[5002]: I0930 12:39:41.105175 5002 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f36c00a3-1b8e-4f69-8690-7f91b49eb8e1-run-httpd\") on node \"crc\" DevicePath \"\"" Sep 30 12:39:41 crc kubenswrapper[5002]: I0930 12:39:41.105187 5002 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f36c00a3-1b8e-4f69-8690-7f91b49eb8e1-log-httpd\") on node \"crc\" DevicePath \"\"" Sep 30 12:39:41 crc kubenswrapper[5002]: I0930 12:39:41.105199 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-frjjk\" (UniqueName: \"kubernetes.io/projected/f36c00a3-1b8e-4f69-8690-7f91b49eb8e1-kube-api-access-frjjk\") on node \"crc\" DevicePath \"\"" Sep 30 12:39:41 crc kubenswrapper[5002]: I0930 12:39:41.105213 5002 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f36c00a3-1b8e-4f69-8690-7f91b49eb8e1-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 12:39:41 crc kubenswrapper[5002]: I0930 12:39:41.105226 5002 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f36c00a3-1b8e-4f69-8690-7f91b49eb8e1-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 12:39:41 crc kubenswrapper[5002]: I0930 12:39:41.133459 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for 
volume "kubernetes.io/secret/f36c00a3-1b8e-4f69-8690-7f91b49eb8e1-config-data" (OuterVolumeSpecName: "config-data") pod "f36c00a3-1b8e-4f69-8690-7f91b49eb8e1" (UID: "f36c00a3-1b8e-4f69-8690-7f91b49eb8e1"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:39:41 crc kubenswrapper[5002]: I0930 12:39:41.206793 5002 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f36c00a3-1b8e-4f69-8690-7f91b49eb8e1-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 12:39:41 crc kubenswrapper[5002]: I0930 12:39:41.222662 5002 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="e6cd1979-da09-4b1b-afa5-b8a10085918c" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.0.198:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Sep 30 12:39:41 crc kubenswrapper[5002]: I0930 12:39:41.222743 5002 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="e6cd1979-da09-4b1b-afa5-b8a10085918c" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.0.198:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Sep 30 12:39:41 crc kubenswrapper[5002]: I0930 12:39:41.444360 5002 generic.go:334] "Generic (PLEG): container finished" podID="f36c00a3-1b8e-4f69-8690-7f91b49eb8e1" containerID="9f4340c61476e7e180916ae426077ec083f5f78303aab7b1bea55eb501713d9c" exitCode=0 Sep 30 12:39:41 crc kubenswrapper[5002]: I0930 12:39:41.444440 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 30 12:39:41 crc kubenswrapper[5002]: I0930 12:39:41.444455 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f36c00a3-1b8e-4f69-8690-7f91b49eb8e1","Type":"ContainerDied","Data":"9f4340c61476e7e180916ae426077ec083f5f78303aab7b1bea55eb501713d9c"} Sep 30 12:39:41 crc kubenswrapper[5002]: I0930 12:39:41.444775 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f36c00a3-1b8e-4f69-8690-7f91b49eb8e1","Type":"ContainerDied","Data":"d2ede50b14c6db96ded7e994beb6cedf59a961c04a05d967c99f1ae546ab5534"} Sep 30 12:39:41 crc kubenswrapper[5002]: I0930 12:39:41.444800 5002 scope.go:117] "RemoveContainer" containerID="3a1c14dcb593b6a010604ddc317205c2511f3c4513f3967c31e4515dde283f42" Sep 30 12:39:41 crc kubenswrapper[5002]: I0930 12:39:41.492252 5002 scope.go:117] "RemoveContainer" containerID="ceb447eb0439373b34f517918bce85b8fca65d503059fe5dac95e6792c06a058" Sep 30 12:39:41 crc kubenswrapper[5002]: I0930 12:39:41.495401 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Sep 30 12:39:41 crc kubenswrapper[5002]: I0930 12:39:41.510223 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Sep 30 12:39:41 crc kubenswrapper[5002]: I0930 12:39:41.520229 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Sep 30 12:39:41 crc kubenswrapper[5002]: E0930 12:39:41.520766 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f36c00a3-1b8e-4f69-8690-7f91b49eb8e1" containerName="ceilometer-central-agent" Sep 30 12:39:41 crc kubenswrapper[5002]: I0930 12:39:41.520787 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="f36c00a3-1b8e-4f69-8690-7f91b49eb8e1" containerName="ceilometer-central-agent" Sep 30 12:39:41 crc kubenswrapper[5002]: 
E0930 12:39:41.520829 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f36c00a3-1b8e-4f69-8690-7f91b49eb8e1" containerName="proxy-httpd" Sep 30 12:39:41 crc kubenswrapper[5002]: I0930 12:39:41.520839 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="f36c00a3-1b8e-4f69-8690-7f91b49eb8e1" containerName="proxy-httpd" Sep 30 12:39:41 crc kubenswrapper[5002]: E0930 12:39:41.520858 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f36c00a3-1b8e-4f69-8690-7f91b49eb8e1" containerName="sg-core" Sep 30 12:39:41 crc kubenswrapper[5002]: I0930 12:39:41.520865 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="f36c00a3-1b8e-4f69-8690-7f91b49eb8e1" containerName="sg-core" Sep 30 12:39:41 crc kubenswrapper[5002]: E0930 12:39:41.520880 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f36c00a3-1b8e-4f69-8690-7f91b49eb8e1" containerName="ceilometer-notification-agent" Sep 30 12:39:41 crc kubenswrapper[5002]: I0930 12:39:41.520888 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="f36c00a3-1b8e-4f69-8690-7f91b49eb8e1" containerName="ceilometer-notification-agent" Sep 30 12:39:41 crc kubenswrapper[5002]: I0930 12:39:41.521169 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="f36c00a3-1b8e-4f69-8690-7f91b49eb8e1" containerName="sg-core" Sep 30 12:39:41 crc kubenswrapper[5002]: I0930 12:39:41.521203 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="f36c00a3-1b8e-4f69-8690-7f91b49eb8e1" containerName="proxy-httpd" Sep 30 12:39:41 crc kubenswrapper[5002]: I0930 12:39:41.521221 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="f36c00a3-1b8e-4f69-8690-7f91b49eb8e1" containerName="ceilometer-central-agent" Sep 30 12:39:41 crc kubenswrapper[5002]: I0930 12:39:41.521234 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="f36c00a3-1b8e-4f69-8690-7f91b49eb8e1" containerName="ceilometer-notification-agent" Sep 30 12:39:41 crc kubenswrapper[5002]: I0930 12:39:41.523423 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Sep 30 12:39:41 crc kubenswrapper[5002]: I0930 12:39:41.525516 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Sep 30 12:39:41 crc kubenswrapper[5002]: I0930 12:39:41.526748 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 30 12:39:41 crc kubenswrapper[5002]: I0930 12:39:41.528923 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Sep 30 12:39:41 crc kubenswrapper[5002]: I0930 12:39:41.529085 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Sep 30 12:39:41 crc kubenswrapper[5002]: I0930 12:39:41.558851 5002 scope.go:117] "RemoveContainer" containerID="9f4340c61476e7e180916ae426077ec083f5f78303aab7b1bea55eb501713d9c" Sep 30 12:39:41 crc kubenswrapper[5002]: I0930 12:39:41.607084 5002 scope.go:117] "RemoveContainer" containerID="5e3451bb7d4e6357b90125dad5f9bd0db99e24a04ca4dd761c012c386e5e12fa" Sep 30 12:39:41 crc kubenswrapper[5002]: I0930 12:39:41.612396 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/71f3b49b-1ea5-4f00-b250-a62eb24bd7f5-scripts\") pod \"ceilometer-0\" (UID: \"71f3b49b-1ea5-4f00-b250-a62eb24bd7f5\") " pod="openstack/ceilometer-0" Sep 30 12:39:41 crc kubenswrapper[5002]: I0930 12:39:41.612444 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/71f3b49b-1ea5-4f00-b250-a62eb24bd7f5-run-httpd\") pod \"ceilometer-0\" (UID: \"71f3b49b-1ea5-4f00-b250-a62eb24bd7f5\") " pod="openstack/ceilometer-0" Sep 30 12:39:41 crc kubenswrapper[5002]: I0930 12:39:41.612466 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/71f3b49b-1ea5-4f00-b250-a62eb24bd7f5-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"71f3b49b-1ea5-4f00-b250-a62eb24bd7f5\") " pod="openstack/ceilometer-0" Sep 30 12:39:41 crc kubenswrapper[5002]: I0930 12:39:41.612503 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wlb77\" (UniqueName: \"kubernetes.io/projected/71f3b49b-1ea5-4f00-b250-a62eb24bd7f5-kube-api-access-wlb77\") pod \"ceilometer-0\" (UID: \"71f3b49b-1ea5-4f00-b250-a62eb24bd7f5\") " pod="openstack/ceilometer-0" Sep 30 12:39:41 crc kubenswrapper[5002]: I0930 12:39:41.612531 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/71f3b49b-1ea5-4f00-b250-a62eb24bd7f5-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"71f3b49b-1ea5-4f00-b250-a62eb24bd7f5\") " pod="openstack/ceilometer-0" Sep 30 12:39:41 crc kubenswrapper[5002]: I0930 12:39:41.612581 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/71f3b49b-1ea5-4f00-b250-a62eb24bd7f5-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"71f3b49b-1ea5-4f00-b250-a62eb24bd7f5\") " pod="openstack/ceilometer-0" Sep 30 12:39:41 crc kubenswrapper[5002]: I0930 12:39:41.612652 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/71f3b49b-1ea5-4f00-b250-a62eb24bd7f5-config-data\") pod \"ceilometer-0\" (UID: \"71f3b49b-1ea5-4f00-b250-a62eb24bd7f5\") " pod="openstack/ceilometer-0" Sep 30 12:39:41 crc kubenswrapper[5002]: I0930 12:39:41.612668 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/71f3b49b-1ea5-4f00-b250-a62eb24bd7f5-log-httpd\") pod \"ceilometer-0\" (UID: \"71f3b49b-1ea5-4f00-b250-a62eb24bd7f5\") " pod="openstack/ceilometer-0" Sep 30 12:39:41 crc kubenswrapper[5002]: I0930 12:39:41.623599 5002 scope.go:117] "RemoveContainer" containerID="3a1c14dcb593b6a010604ddc317205c2511f3c4513f3967c31e4515dde283f42" Sep 30 12:39:41 crc kubenswrapper[5002]: E0930 12:39:41.623924 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3a1c14dcb593b6a010604ddc317205c2511f3c4513f3967c31e4515dde283f42\": container with ID starting with 3a1c14dcb593b6a010604ddc317205c2511f3c4513f3967c31e4515dde283f42 not found: ID does not exist" containerID="3a1c14dcb593b6a010604ddc317205c2511f3c4513f3967c31e4515dde283f42" Sep 30 12:39:41 crc kubenswrapper[5002]: I0930 12:39:41.623960 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3a1c14dcb593b6a010604ddc317205c2511f3c4513f3967c31e4515dde283f42"} err="failed to get container status \"3a1c14dcb593b6a010604ddc317205c2511f3c4513f3967c31e4515dde283f42\": rpc error: code = NotFound desc = could not find container \"3a1c14dcb593b6a010604ddc317205c2511f3c4513f3967c31e4515dde283f42\": container with ID starting with 3a1c14dcb593b6a010604ddc317205c2511f3c4513f3967c31e4515dde283f42 not found: ID does not exist" Sep 30 12:39:41 crc kubenswrapper[5002]: I0930 12:39:41.623979 5002 scope.go:117] "RemoveContainer" containerID="ceb447eb0439373b34f517918bce85b8fca65d503059fe5dac95e6792c06a058" Sep 30 12:39:41 crc kubenswrapper[5002]: E0930 12:39:41.624256 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ceb447eb0439373b34f517918bce85b8fca65d503059fe5dac95e6792c06a058\": container with ID starting with ceb447eb0439373b34f517918bce85b8fca65d503059fe5dac95e6792c06a058 not found: ID does not exist" containerID="ceb447eb0439373b34f517918bce85b8fca65d503059fe5dac95e6792c06a058" Sep 30 12:39:41 crc kubenswrapper[5002]: I0930 12:39:41.624282 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ceb447eb0439373b34f517918bce85b8fca65d503059fe5dac95e6792c06a058"} err="failed to get container status \"ceb447eb0439373b34f517918bce85b8fca65d503059fe5dac95e6792c06a058\": rpc error: code = NotFound desc = could not find container \"ceb447eb0439373b34f517918bce85b8fca65d503059fe5dac95e6792c06a058\": container with ID starting with ceb447eb0439373b34f517918bce85b8fca65d503059fe5dac95e6792c06a058 not found: ID does not exist" Sep 30 12:39:41 crc kubenswrapper[5002]: I0930 12:39:41.624295 5002 scope.go:117] "RemoveContainer" containerID="9f4340c61476e7e180916ae426077ec083f5f78303aab7b1bea55eb501713d9c" Sep 30 12:39:41 crc kubenswrapper[5002]: E0930 12:39:41.624524 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9f4340c61476e7e180916ae426077ec083f5f78303aab7b1bea55eb501713d9c\": container with ID starting with 9f4340c61476e7e180916ae426077ec083f5f78303aab7b1bea55eb501713d9c not 
found: ID does not exist" containerID="9f4340c61476e7e180916ae426077ec083f5f78303aab7b1bea55eb501713d9c" Sep 30 12:39:41 crc kubenswrapper[5002]: I0930 12:39:41.624553 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9f4340c61476e7e180916ae426077ec083f5f78303aab7b1bea55eb501713d9c"} err="failed to get container status \"9f4340c61476e7e180916ae426077ec083f5f78303aab7b1bea55eb501713d9c\": rpc error: code = NotFound desc = could not find container \"9f4340c61476e7e180916ae426077ec083f5f78303aab7b1bea55eb501713d9c\": container with ID starting with 9f4340c61476e7e180916ae426077ec083f5f78303aab7b1bea55eb501713d9c not found: ID does not exist" Sep 30 12:39:41 crc kubenswrapper[5002]: I0930 12:39:41.624584 5002 scope.go:117] "RemoveContainer" containerID="5e3451bb7d4e6357b90125dad5f9bd0db99e24a04ca4dd761c012c386e5e12fa" Sep 30 12:39:41 crc kubenswrapper[5002]: E0930 12:39:41.624809 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5e3451bb7d4e6357b90125dad5f9bd0db99e24a04ca4dd761c012c386e5e12fa\": container with ID starting with 5e3451bb7d4e6357b90125dad5f9bd0db99e24a04ca4dd761c012c386e5e12fa not found: ID does not exist" containerID="5e3451bb7d4e6357b90125dad5f9bd0db99e24a04ca4dd761c012c386e5e12fa" Sep 30 12:39:41 crc kubenswrapper[5002]: I0930 12:39:41.624835 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5e3451bb7d4e6357b90125dad5f9bd0db99e24a04ca4dd761c012c386e5e12fa"} err="failed to get container status \"5e3451bb7d4e6357b90125dad5f9bd0db99e24a04ca4dd761c012c386e5e12fa\": rpc error: code = NotFound desc = could not find container \"5e3451bb7d4e6357b90125dad5f9bd0db99e24a04ca4dd761c012c386e5e12fa\": container with ID starting with 5e3451bb7d4e6357b90125dad5f9bd0db99e24a04ca4dd761c012c386e5e12fa not found: ID does not exist" Sep 30 12:39:41 crc kubenswrapper[5002]: I0930 12:39:41.713885 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/71f3b49b-1ea5-4f00-b250-a62eb24bd7f5-config-data\") pod \"ceilometer-0\" (UID: \"71f3b49b-1ea5-4f00-b250-a62eb24bd7f5\") " pod="openstack/ceilometer-0" Sep 30 12:39:41 crc kubenswrapper[5002]: I0930 12:39:41.713932 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/71f3b49b-1ea5-4f00-b250-a62eb24bd7f5-log-httpd\") pod \"ceilometer-0\" (UID: \"71f3b49b-1ea5-4f00-b250-a62eb24bd7f5\") " pod="openstack/ceilometer-0" Sep 30 12:39:41 crc kubenswrapper[5002]: I0930 12:39:41.714042 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/71f3b49b-1ea5-4f00-b250-a62eb24bd7f5-scripts\") pod \"ceilometer-0\" (UID: \"71f3b49b-1ea5-4f00-b250-a62eb24bd7f5\") " pod="openstack/ceilometer-0" Sep 30 12:39:41 crc kubenswrapper[5002]: I0930 12:39:41.714078 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/71f3b49b-1ea5-4f00-b250-a62eb24bd7f5-run-httpd\") pod \"ceilometer-0\" (UID: \"71f3b49b-1ea5-4f00-b250-a62eb24bd7f5\") " pod="openstack/ceilometer-0" Sep 30 12:39:41 crc kubenswrapper[5002]: I0930 12:39:41.714101 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/71f3b49b-1ea5-4f00-b250-a62eb24bd7f5-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"71f3b49b-1ea5-4f00-b250-a62eb24bd7f5\") " pod="openstack/ceilometer-0" Sep 30 12:39:41 crc kubenswrapper[5002]: I0930 12:39:41.714122 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wlb77\" (UniqueName: \"kubernetes.io/projected/71f3b49b-1ea5-4f00-b250-a62eb24bd7f5-kube-api-access-wlb77\") pod \"ceilometer-0\" (UID: \"71f3b49b-1ea5-4f00-b250-a62eb24bd7f5\") " pod="openstack/ceilometer-0" Sep 30 12:39:41 crc kubenswrapper[5002]: I0930 12:39:41.714159 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/71f3b49b-1ea5-4f00-b250-a62eb24bd7f5-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"71f3b49b-1ea5-4f00-b250-a62eb24bd7f5\") " pod="openstack/ceilometer-0" Sep 30 12:39:41 crc kubenswrapper[5002]: I0930 12:39:41.714217 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/71f3b49b-1ea5-4f00-b250-a62eb24bd7f5-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"71f3b49b-1ea5-4f00-b250-a62eb24bd7f5\") " pod="openstack/ceilometer-0" Sep 30 12:39:41 crc kubenswrapper[5002]: I0930 12:39:41.715233 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/71f3b49b-1ea5-4f00-b250-a62eb24bd7f5-run-httpd\") pod \"ceilometer-0\" (UID: \"71f3b49b-1ea5-4f00-b250-a62eb24bd7f5\") " pod="openstack/ceilometer-0" Sep 30 12:39:41 crc kubenswrapper[5002]: I0930 12:39:41.715280 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/71f3b49b-1ea5-4f00-b250-a62eb24bd7f5-log-httpd\") pod \"ceilometer-0\" (UID: \"71f3b49b-1ea5-4f00-b250-a62eb24bd7f5\") " pod="openstack/ceilometer-0" Sep 30 12:39:41 crc kubenswrapper[5002]: I0930 12:39:41.721962 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/71f3b49b-1ea5-4f00-b250-a62eb24bd7f5-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"71f3b49b-1ea5-4f00-b250-a62eb24bd7f5\") " pod="openstack/ceilometer-0" Sep 30 12:39:41 crc kubenswrapper[5002]: I0930 12:39:41.722187 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/71f3b49b-1ea5-4f00-b250-a62eb24bd7f5-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"71f3b49b-1ea5-4f00-b250-a62eb24bd7f5\") " pod="openstack/ceilometer-0" Sep 30 12:39:41 crc kubenswrapper[5002]: I0930 12:39:41.722190 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/71f3b49b-1ea5-4f00-b250-a62eb24bd7f5-scripts\") pod \"ceilometer-0\" (UID: \"71f3b49b-1ea5-4f00-b250-a62eb24bd7f5\") " pod="openstack/ceilometer-0" Sep 30 12:39:41 crc kubenswrapper[5002]: I0930 12:39:41.723076 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/71f3b49b-1ea5-4f00-b250-a62eb24bd7f5-config-data\") pod \"ceilometer-0\" (UID: \"71f3b49b-1ea5-4f00-b250-a62eb24bd7f5\") " pod="openstack/ceilometer-0" Sep 30 12:39:41 crc kubenswrapper[5002]: I0930 12:39:41.732765 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/71f3b49b-1ea5-4f00-b250-a62eb24bd7f5-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"71f3b49b-1ea5-4f00-b250-a62eb24bd7f5\") " pod="openstack/ceilometer-0" Sep 30 12:39:41 crc kubenswrapper[5002]: I0930 12:39:41.734995 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wlb77\" (UniqueName: \"kubernetes.io/projected/71f3b49b-1ea5-4f00-b250-a62eb24bd7f5-kube-api-access-wlb77\") pod \"ceilometer-0\" (UID: \"71f3b49b-1ea5-4f00-b250-a62eb24bd7f5\") " pod="openstack/ceilometer-0" Sep 30 12:39:41 crc kubenswrapper[5002]: I0930 12:39:41.901927 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 30 12:39:42 crc kubenswrapper[5002]: I0930 12:39:42.364986 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 30 12:39:42 crc kubenswrapper[5002]: I0930 12:39:42.456228 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"71f3b49b-1ea5-4f00-b250-a62eb24bd7f5","Type":"ContainerStarted","Data":"7f04ab2429dafd756593b0278c156d2f326a7d893abbd04407ccfaa70e586a52"} Sep 30 12:39:42 crc kubenswrapper[5002]: I0930 12:39:42.686563 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f36c00a3-1b8e-4f69-8690-7f91b49eb8e1" path="/var/lib/kubelet/pods/f36c00a3-1b8e-4f69-8690-7f91b49eb8e1/volumes" Sep 30 12:39:43 crc kubenswrapper[5002]: I0930 12:39:43.482078 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"71f3b49b-1ea5-4f00-b250-a62eb24bd7f5","Type":"ContainerStarted","Data":"c7395488a9b0d10b6ea0867f42875667e77652f70b0e0597b6b1f4b95f2120cb"} Sep 30 12:39:44 crc kubenswrapper[5002]: I0930 12:39:44.491191 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"71f3b49b-1ea5-4f00-b250-a62eb24bd7f5","Type":"ContainerStarted","Data":"af6ddd5593a215979eafaa1aa8d3be9d13edce36adfe1953169fdfd29351d1b9"} Sep 30 12:39:44 crc kubenswrapper[5002]: I0930 12:39:44.642461 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Sep 30 12:39:44 crc kubenswrapper[5002]: I0930 12:39:44.644663 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Sep 30 12:39:44 crc kubenswrapper[5002]: I0930 12:39:44.651122 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Sep 30 12:39:45 crc kubenswrapper[5002]: I0930 12:39:45.504744 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"71f3b49b-1ea5-4f00-b250-a62eb24bd7f5","Type":"ContainerStarted","Data":"4f6e2428e4297684592f737a3a8940f89b8611b02c6192650fad1dc5ffdcda66"} Sep 30 12:39:45 crc kubenswrapper[5002]: I0930 12:39:45.510779 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Sep 30 12:39:46 crc kubenswrapper[5002]: I0930 12:39:46.517134 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"71f3b49b-1ea5-4f00-b250-a62eb24bd7f5","Type":"ContainerStarted","Data":"6921a273fea651a9f299c26e6b72917ebcc691e284c348de4c53875890f0a86f"} Sep 30 12:39:46 crc kubenswrapper[5002]: I0930 12:39:46.547405 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=1.874578587 podStartE2EDuration="5.547384645s" podCreationTimestamp="2025-09-30 
12:39:41 +0000 UTC" firstStartedPulling="2025-09-30 12:39:42.371523499 +0000 UTC m=+1156.621205655" lastFinishedPulling="2025-09-30 12:39:46.044329557 +0000 UTC m=+1160.294011713" observedRunningTime="2025-09-30 12:39:46.539127247 +0000 UTC m=+1160.788809393" watchObservedRunningTime="2025-09-30 12:39:46.547384645 +0000 UTC m=+1160.797066811" Sep 30 12:39:46 crc kubenswrapper[5002]: I0930 12:39:46.798310 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0" Sep 30 12:39:47 crc kubenswrapper[5002]: I0930 12:39:47.506781 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Sep 30 12:39:47 crc kubenswrapper[5002]: I0930 12:39:47.528612 5002 generic.go:334] "Generic (PLEG): container finished" podID="7d675115-dfa8-46a9-8fdf-b9c5e6ef2d09" containerID="66188d42a7f481568dd66d6eca5ad90d8ee5cc340f9ccfcff45d059c45326067" exitCode=137 Sep 30 12:39:47 crc kubenswrapper[5002]: I0930 12:39:47.528665 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Sep 30 12:39:47 crc kubenswrapper[5002]: I0930 12:39:47.528713 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"7d675115-dfa8-46a9-8fdf-b9c5e6ef2d09","Type":"ContainerDied","Data":"66188d42a7f481568dd66d6eca5ad90d8ee5cc340f9ccfcff45d059c45326067"} Sep 30 12:39:47 crc kubenswrapper[5002]: I0930 12:39:47.528750 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"7d675115-dfa8-46a9-8fdf-b9c5e6ef2d09","Type":"ContainerDied","Data":"1c394baac9c21398515b7c0a8edab83399bb4fc45c3a318dee217ca73d29da35"} Sep 30 12:39:47 crc kubenswrapper[5002]: I0930 12:39:47.528776 5002 scope.go:117] "RemoveContainer" containerID="66188d42a7f481568dd66d6eca5ad90d8ee5cc340f9ccfcff45d059c45326067" Sep 30 12:39:47 crc kubenswrapper[5002]: I0930 12:39:47.529770 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Sep 30 12:39:47 crc kubenswrapper[5002]: I0930 12:39:47.554033 5002 scope.go:117] "RemoveContainer" containerID="66188d42a7f481568dd66d6eca5ad90d8ee5cc340f9ccfcff45d059c45326067" Sep 30 12:39:47 crc kubenswrapper[5002]: E0930 12:39:47.554428 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"66188d42a7f481568dd66d6eca5ad90d8ee5cc340f9ccfcff45d059c45326067\": container with ID starting with 66188d42a7f481568dd66d6eca5ad90d8ee5cc340f9ccfcff45d059c45326067 not found: ID does not exist" containerID="66188d42a7f481568dd66d6eca5ad90d8ee5cc340f9ccfcff45d059c45326067" Sep 30 12:39:47 crc kubenswrapper[5002]: I0930 12:39:47.554498 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"66188d42a7f481568dd66d6eca5ad90d8ee5cc340f9ccfcff45d059c45326067"} err="failed to get container status \"66188d42a7f481568dd66d6eca5ad90d8ee5cc340f9ccfcff45d059c45326067\": rpc error: code = NotFound desc = could not find container \"66188d42a7f481568dd66d6eca5ad90d8ee5cc340f9ccfcff45d059c45326067\": container with ID starting with 66188d42a7f481568dd66d6eca5ad90d8ee5cc340f9ccfcff45d059c45326067 not found: ID does not exist" Sep 30 12:39:47 crc kubenswrapper[5002]: I0930 12:39:47.640909 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tmmwj\" (UniqueName: 
\"kubernetes.io/projected/7d675115-dfa8-46a9-8fdf-b9c5e6ef2d09-kube-api-access-tmmwj\") pod \"7d675115-dfa8-46a9-8fdf-b9c5e6ef2d09\" (UID: \"7d675115-dfa8-46a9-8fdf-b9c5e6ef2d09\") " Sep 30 12:39:47 crc kubenswrapper[5002]: I0930 12:39:47.641171 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7d675115-dfa8-46a9-8fdf-b9c5e6ef2d09-combined-ca-bundle\") pod \"7d675115-dfa8-46a9-8fdf-b9c5e6ef2d09\" (UID: \"7d675115-dfa8-46a9-8fdf-b9c5e6ef2d09\") " Sep 30 12:39:47 crc kubenswrapper[5002]: I0930 12:39:47.641317 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7d675115-dfa8-46a9-8fdf-b9c5e6ef2d09-config-data\") pod \"7d675115-dfa8-46a9-8fdf-b9c5e6ef2d09\" (UID: \"7d675115-dfa8-46a9-8fdf-b9c5e6ef2d09\") " Sep 30 12:39:47 crc kubenswrapper[5002]: I0930 12:39:47.665711 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7d675115-dfa8-46a9-8fdf-b9c5e6ef2d09-kube-api-access-tmmwj" (OuterVolumeSpecName: "kube-api-access-tmmwj") pod "7d675115-dfa8-46a9-8fdf-b9c5e6ef2d09" (UID: "7d675115-dfa8-46a9-8fdf-b9c5e6ef2d09"). InnerVolumeSpecName "kube-api-access-tmmwj". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 12:39:47 crc kubenswrapper[5002]: I0930 12:39:47.674091 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7d675115-dfa8-46a9-8fdf-b9c5e6ef2d09-config-data" (OuterVolumeSpecName: "config-data") pod "7d675115-dfa8-46a9-8fdf-b9c5e6ef2d09" (UID: "7d675115-dfa8-46a9-8fdf-b9c5e6ef2d09"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:39:47 crc kubenswrapper[5002]: I0930 12:39:47.690562 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7d675115-dfa8-46a9-8fdf-b9c5e6ef2d09-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7d675115-dfa8-46a9-8fdf-b9c5e6ef2d09" (UID: "7d675115-dfa8-46a9-8fdf-b9c5e6ef2d09"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:39:47 crc kubenswrapper[5002]: I0930 12:39:47.743668 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tmmwj\" (UniqueName: \"kubernetes.io/projected/7d675115-dfa8-46a9-8fdf-b9c5e6ef2d09-kube-api-access-tmmwj\") on node \"crc\" DevicePath \"\"" Sep 30 12:39:47 crc kubenswrapper[5002]: I0930 12:39:47.743705 5002 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7d675115-dfa8-46a9-8fdf-b9c5e6ef2d09-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 12:39:47 crc kubenswrapper[5002]: I0930 12:39:47.743718 5002 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7d675115-dfa8-46a9-8fdf-b9c5e6ef2d09-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 12:39:47 crc kubenswrapper[5002]: I0930 12:39:47.886266 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Sep 30 12:39:47 crc kubenswrapper[5002]: I0930 12:39:47.896118 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Sep 30 12:39:47 crc kubenswrapper[5002]: I0930 12:39:47.909175 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Sep 30 12:39:47 crc kubenswrapper[5002]: E0930 12:39:47.910011 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7d675115-dfa8-46a9-8fdf-b9c5e6ef2d09" containerName="nova-cell1-novncproxy-novncproxy" Sep 30 12:39:47 crc kubenswrapper[5002]: I0930 12:39:47.910050 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="7d675115-dfa8-46a9-8fdf-b9c5e6ef2d09" containerName="nova-cell1-novncproxy-novncproxy" Sep 30 12:39:47 crc kubenswrapper[5002]: I0930 12:39:47.910574 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="7d675115-dfa8-46a9-8fdf-b9c5e6ef2d09" containerName="nova-cell1-novncproxy-novncproxy" Sep 30 12:39:47 crc kubenswrapper[5002]: I0930 12:39:47.911958 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Sep 30 12:39:47 crc kubenswrapper[5002]: I0930 12:39:47.917631 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data" Sep 30 12:39:47 crc kubenswrapper[5002]: I0930 12:39:47.917781 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-novncproxy-cell1-vencrypt" Sep 30 12:39:47 crc kubenswrapper[5002]: I0930 12:39:47.919166 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-novncproxy-cell1-public-svc" Sep 30 12:39:47 crc kubenswrapper[5002]: I0930 12:39:47.922650 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Sep 30 12:39:48 crc kubenswrapper[5002]: I0930 12:39:48.049089 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/18ab92e5-cde3-4728-9782-42025fa3d6b4-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"18ab92e5-cde3-4728-9782-42025fa3d6b4\") " pod="openstack/nova-cell1-novncproxy-0" Sep 30 12:39:48 crc kubenswrapper[5002]: I0930 12:39:48.049213 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/18ab92e5-cde3-4728-9782-42025fa3d6b4-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"18ab92e5-cde3-4728-9782-42025fa3d6b4\") " pod="openstack/nova-cell1-novncproxy-0" Sep 30 12:39:48 crc kubenswrapper[5002]: I0930 12:39:48.049287 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j4v77\" (UniqueName: \"kubernetes.io/projected/18ab92e5-cde3-4728-9782-42025fa3d6b4-kube-api-access-j4v77\") pod \"nova-cell1-novncproxy-0\" (UID: \"18ab92e5-cde3-4728-9782-42025fa3d6b4\") " pod="openstack/nova-cell1-novncproxy-0" Sep 30 12:39:48 crc kubenswrapper[5002]: I0930 12:39:48.049444 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/18ab92e5-cde3-4728-9782-42025fa3d6b4-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"18ab92e5-cde3-4728-9782-42025fa3d6b4\") " pod="openstack/nova-cell1-novncproxy-0" Sep 30 12:39:48 crc kubenswrapper[5002]: I0930 12:39:48.049528 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/18ab92e5-cde3-4728-9782-42025fa3d6b4-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"18ab92e5-cde3-4728-9782-42025fa3d6b4\") " pod="openstack/nova-cell1-novncproxy-0" Sep 30 12:39:48 crc kubenswrapper[5002]: I0930 12:39:48.151678 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j4v77\" (UniqueName: \"kubernetes.io/projected/18ab92e5-cde3-4728-9782-42025fa3d6b4-kube-api-access-j4v77\") pod \"nova-cell1-novncproxy-0\" (UID: \"18ab92e5-cde3-4728-9782-42025fa3d6b4\") " pod="openstack/nova-cell1-novncproxy-0" Sep 30 12:39:48 crc kubenswrapper[5002]: I0930 12:39:48.151809 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/18ab92e5-cde3-4728-9782-42025fa3d6b4-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"18ab92e5-cde3-4728-9782-42025fa3d6b4\") " 
pod="openstack/nova-cell1-novncproxy-0" Sep 30 12:39:48 crc kubenswrapper[5002]: I0930 12:39:48.151920 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/18ab92e5-cde3-4728-9782-42025fa3d6b4-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"18ab92e5-cde3-4728-9782-42025fa3d6b4\") " pod="openstack/nova-cell1-novncproxy-0" Sep 30 12:39:48 crc kubenswrapper[5002]: I0930 12:39:48.151957 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/18ab92e5-cde3-4728-9782-42025fa3d6b4-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"18ab92e5-cde3-4728-9782-42025fa3d6b4\") " pod="openstack/nova-cell1-novncproxy-0" Sep 30 12:39:48 crc kubenswrapper[5002]: I0930 12:39:48.152018 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/18ab92e5-cde3-4728-9782-42025fa3d6b4-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"18ab92e5-cde3-4728-9782-42025fa3d6b4\") " pod="openstack/nova-cell1-novncproxy-0" Sep 30 12:39:48 crc kubenswrapper[5002]: I0930 12:39:48.155678 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/18ab92e5-cde3-4728-9782-42025fa3d6b4-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"18ab92e5-cde3-4728-9782-42025fa3d6b4\") " pod="openstack/nova-cell1-novncproxy-0" Sep 30 12:39:48 crc kubenswrapper[5002]: I0930 12:39:48.156909 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/18ab92e5-cde3-4728-9782-42025fa3d6b4-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"18ab92e5-cde3-4728-9782-42025fa3d6b4\") " pod="openstack/nova-cell1-novncproxy-0" Sep 30 12:39:48 crc kubenswrapper[5002]: I0930 12:39:48.159046 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/18ab92e5-cde3-4728-9782-42025fa3d6b4-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"18ab92e5-cde3-4728-9782-42025fa3d6b4\") " pod="openstack/nova-cell1-novncproxy-0" Sep 30 12:39:48 crc kubenswrapper[5002]: I0930 12:39:48.165323 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/18ab92e5-cde3-4728-9782-42025fa3d6b4-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"18ab92e5-cde3-4728-9782-42025fa3d6b4\") " pod="openstack/nova-cell1-novncproxy-0" Sep 30 12:39:48 crc kubenswrapper[5002]: I0930 12:39:48.167147 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j4v77\" (UniqueName: \"kubernetes.io/projected/18ab92e5-cde3-4728-9782-42025fa3d6b4-kube-api-access-j4v77\") pod \"nova-cell1-novncproxy-0\" (UID: \"18ab92e5-cde3-4728-9782-42025fa3d6b4\") " pod="openstack/nova-cell1-novncproxy-0" Sep 30 12:39:48 crc kubenswrapper[5002]: I0930 12:39:48.240655 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Sep 30 12:39:48 crc kubenswrapper[5002]: I0930 12:39:48.694264 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7d675115-dfa8-46a9-8fdf-b9c5e6ef2d09" path="/var/lib/kubelet/pods/7d675115-dfa8-46a9-8fdf-b9c5e6ef2d09/volumes" Sep 30 12:39:48 crc kubenswrapper[5002]: I0930 12:39:48.704968 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Sep 30 12:39:49 crc kubenswrapper[5002]: I0930 12:39:49.551133 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"18ab92e5-cde3-4728-9782-42025fa3d6b4","Type":"ContainerStarted","Data":"74c3c843129e5e0e60072bf128ee22f1f3087d3d3e89111f943b29bfc85422af"} Sep 30 12:39:49 crc kubenswrapper[5002]: I0930 12:39:49.551582 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"18ab92e5-cde3-4728-9782-42025fa3d6b4","Type":"ContainerStarted","Data":"ce840d3ba6c9ceb2ba57b16780fb10e2995bbcaad0179502f07ac77b5966fbdf"} Sep 30 12:39:49 crc kubenswrapper[5002]: I0930 12:39:49.572422 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=2.572401928 podStartE2EDuration="2.572401928s" podCreationTimestamp="2025-09-30 12:39:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 12:39:49.567929524 +0000 UTC m=+1163.817611680" watchObservedRunningTime="2025-09-30 12:39:49.572401928 +0000 UTC m=+1163.822084074" Sep 30 12:39:50 crc kubenswrapper[5002]: I0930 12:39:50.146677 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Sep 30 12:39:50 crc kubenswrapper[5002]: I0930 12:39:50.149730 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Sep 30 12:39:50 crc kubenswrapper[5002]: I0930 12:39:50.150528 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Sep 30 12:39:50 crc kubenswrapper[5002]: I0930 12:39:50.155782 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Sep 30 12:39:50 crc kubenswrapper[5002]: I0930 12:39:50.559073 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Sep 30 12:39:50 crc kubenswrapper[5002]: I0930 12:39:50.562400 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Sep 30 12:39:50 crc kubenswrapper[5002]: I0930 12:39:50.752234 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-59cf4bdb65-sh88w"] Sep 30 12:39:50 crc kubenswrapper[5002]: I0930 12:39:50.754451 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-59cf4bdb65-sh88w" Sep 30 12:39:50 crc kubenswrapper[5002]: I0930 12:39:50.762464 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-59cf4bdb65-sh88w"] Sep 30 12:39:50 crc kubenswrapper[5002]: I0930 12:39:50.912025 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c2244511-4d5b-4162-a0f3-e5c8e89781f1-config\") pod \"dnsmasq-dns-59cf4bdb65-sh88w\" (UID: \"c2244511-4d5b-4162-a0f3-e5c8e89781f1\") " pod="openstack/dnsmasq-dns-59cf4bdb65-sh88w" Sep 30 12:39:50 crc kubenswrapper[5002]: I0930 12:39:50.912331 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mm6k6\" (UniqueName: \"kubernetes.io/projected/c2244511-4d5b-4162-a0f3-e5c8e89781f1-kube-api-access-mm6k6\") pod \"dnsmasq-dns-59cf4bdb65-sh88w\" (UID: \"c2244511-4d5b-4162-a0f3-e5c8e89781f1\") " pod="openstack/dnsmasq-dns-59cf4bdb65-sh88w" Sep 30 12:39:50 crc kubenswrapper[5002]: I0930 12:39:50.912362 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c2244511-4d5b-4162-a0f3-e5c8e89781f1-ovsdbserver-sb\") pod \"dnsmasq-dns-59cf4bdb65-sh88w\" (UID: \"c2244511-4d5b-4162-a0f3-e5c8e89781f1\") " pod="openstack/dnsmasq-dns-59cf4bdb65-sh88w" Sep 30 12:39:50 crc kubenswrapper[5002]: I0930 12:39:50.912492 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c2244511-4d5b-4162-a0f3-e5c8e89781f1-ovsdbserver-nb\") pod \"dnsmasq-dns-59cf4bdb65-sh88w\" (UID: \"c2244511-4d5b-4162-a0f3-e5c8e89781f1\") " pod="openstack/dnsmasq-dns-59cf4bdb65-sh88w" Sep 30 12:39:50 crc kubenswrapper[5002]: I0930 12:39:50.912517 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/c2244511-4d5b-4162-a0f3-e5c8e89781f1-dns-swift-storage-0\") pod \"dnsmasq-dns-59cf4bdb65-sh88w\" (UID: \"c2244511-4d5b-4162-a0f3-e5c8e89781f1\") " pod="openstack/dnsmasq-dns-59cf4bdb65-sh88w" Sep 30 12:39:50 crc kubenswrapper[5002]: I0930 12:39:50.912539 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c2244511-4d5b-4162-a0f3-e5c8e89781f1-dns-svc\") pod \"dnsmasq-dns-59cf4bdb65-sh88w\" (UID: \"c2244511-4d5b-4162-a0f3-e5c8e89781f1\") " pod="openstack/dnsmasq-dns-59cf4bdb65-sh88w" Sep 30 12:39:51 crc kubenswrapper[5002]: I0930 12:39:51.014502 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c2244511-4d5b-4162-a0f3-e5c8e89781f1-ovsdbserver-nb\") pod \"dnsmasq-dns-59cf4bdb65-sh88w\" (UID: \"c2244511-4d5b-4162-a0f3-e5c8e89781f1\") " pod="openstack/dnsmasq-dns-59cf4bdb65-sh88w" Sep 30 12:39:51 crc kubenswrapper[5002]: I0930 12:39:51.015006 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/c2244511-4d5b-4162-a0f3-e5c8e89781f1-dns-swift-storage-0\") pod \"dnsmasq-dns-59cf4bdb65-sh88w\" (UID: \"c2244511-4d5b-4162-a0f3-e5c8e89781f1\") " pod="openstack/dnsmasq-dns-59cf4bdb65-sh88w" Sep 30 12:39:51 crc kubenswrapper[5002]: I0930 12:39:51.015095 5002 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c2244511-4d5b-4162-a0f3-e5c8e89781f1-dns-svc\") pod \"dnsmasq-dns-59cf4bdb65-sh88w\" (UID: \"c2244511-4d5b-4162-a0f3-e5c8e89781f1\") " pod="openstack/dnsmasq-dns-59cf4bdb65-sh88w" Sep 30 12:39:51 crc kubenswrapper[5002]: I0930 12:39:51.015222 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c2244511-4d5b-4162-a0f3-e5c8e89781f1-config\") pod \"dnsmasq-dns-59cf4bdb65-sh88w\" (UID: \"c2244511-4d5b-4162-a0f3-e5c8e89781f1\") " pod="openstack/dnsmasq-dns-59cf4bdb65-sh88w" Sep 30 12:39:51 crc kubenswrapper[5002]: I0930 12:39:51.015311 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mm6k6\" (UniqueName: \"kubernetes.io/projected/c2244511-4d5b-4162-a0f3-e5c8e89781f1-kube-api-access-mm6k6\") pod \"dnsmasq-dns-59cf4bdb65-sh88w\" (UID: \"c2244511-4d5b-4162-a0f3-e5c8e89781f1\") " pod="openstack/dnsmasq-dns-59cf4bdb65-sh88w" Sep 30 12:39:51 crc kubenswrapper[5002]: I0930 12:39:51.015392 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c2244511-4d5b-4162-a0f3-e5c8e89781f1-ovsdbserver-sb\") pod \"dnsmasq-dns-59cf4bdb65-sh88w\" (UID: \"c2244511-4d5b-4162-a0f3-e5c8e89781f1\") " pod="openstack/dnsmasq-dns-59cf4bdb65-sh88w" Sep 30 12:39:51 crc kubenswrapper[5002]: I0930 12:39:51.015430 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c2244511-4d5b-4162-a0f3-e5c8e89781f1-ovsdbserver-nb\") pod \"dnsmasq-dns-59cf4bdb65-sh88w\" (UID: \"c2244511-4d5b-4162-a0f3-e5c8e89781f1\") " pod="openstack/dnsmasq-dns-59cf4bdb65-sh88w" Sep 30 12:39:51 crc kubenswrapper[5002]: I0930 12:39:51.015976 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c2244511-4d5b-4162-a0f3-e5c8e89781f1-config\") pod \"dnsmasq-dns-59cf4bdb65-sh88w\" (UID: \"c2244511-4d5b-4162-a0f3-e5c8e89781f1\") " pod="openstack/dnsmasq-dns-59cf4bdb65-sh88w" Sep 30 12:39:51 crc kubenswrapper[5002]: I0930 12:39:51.016260 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c2244511-4d5b-4162-a0f3-e5c8e89781f1-ovsdbserver-sb\") pod \"dnsmasq-dns-59cf4bdb65-sh88w\" (UID: \"c2244511-4d5b-4162-a0f3-e5c8e89781f1\") " pod="openstack/dnsmasq-dns-59cf4bdb65-sh88w" Sep 30 12:39:51 crc kubenswrapper[5002]: I0930 12:39:51.016306 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/c2244511-4d5b-4162-a0f3-e5c8e89781f1-dns-swift-storage-0\") pod \"dnsmasq-dns-59cf4bdb65-sh88w\" (UID: \"c2244511-4d5b-4162-a0f3-e5c8e89781f1\") " pod="openstack/dnsmasq-dns-59cf4bdb65-sh88w" Sep 30 12:39:51 crc kubenswrapper[5002]: I0930 12:39:51.016532 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c2244511-4d5b-4162-a0f3-e5c8e89781f1-dns-svc\") pod \"dnsmasq-dns-59cf4bdb65-sh88w\" (UID: \"c2244511-4d5b-4162-a0f3-e5c8e89781f1\") " pod="openstack/dnsmasq-dns-59cf4bdb65-sh88w" Sep 30 12:39:51 crc kubenswrapper[5002]: I0930 12:39:51.038994 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mm6k6\" (UniqueName: 
\"kubernetes.io/projected/c2244511-4d5b-4162-a0f3-e5c8e89781f1-kube-api-access-mm6k6\") pod \"dnsmasq-dns-59cf4bdb65-sh88w\" (UID: \"c2244511-4d5b-4162-a0f3-e5c8e89781f1\") " pod="openstack/dnsmasq-dns-59cf4bdb65-sh88w" Sep 30 12:39:51 crc kubenswrapper[5002]: I0930 12:39:51.086079 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-59cf4bdb65-sh88w" Sep 30 12:39:51 crc kubenswrapper[5002]: I0930 12:39:51.369446 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-59cf4bdb65-sh88w"] Sep 30 12:39:51 crc kubenswrapper[5002]: I0930 12:39:51.566364 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-59cf4bdb65-sh88w" event={"ID":"c2244511-4d5b-4162-a0f3-e5c8e89781f1","Type":"ContainerStarted","Data":"589ae39deefc1cc09453554cf6c0d9b74c7aa6507b18c7fd5cf8c6e62167bb40"} Sep 30 12:39:52 crc kubenswrapper[5002]: I0930 12:39:52.576950 5002 generic.go:334] "Generic (PLEG): container finished" podID="c2244511-4d5b-4162-a0f3-e5c8e89781f1" containerID="ae633a35d36118945715011b877b6c72db0305486d3ffcbf658df54847155ce1" exitCode=0 Sep 30 12:39:52 crc kubenswrapper[5002]: I0930 12:39:52.578666 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-59cf4bdb65-sh88w" event={"ID":"c2244511-4d5b-4162-a0f3-e5c8e89781f1","Type":"ContainerDied","Data":"ae633a35d36118945715011b877b6c72db0305486d3ffcbf658df54847155ce1"} Sep 30 12:39:52 crc kubenswrapper[5002]: I0930 12:39:52.935999 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Sep 30 12:39:52 crc kubenswrapper[5002]: I0930 12:39:52.936629 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="71f3b49b-1ea5-4f00-b250-a62eb24bd7f5" containerName="sg-core" containerID="cri-o://4f6e2428e4297684592f737a3a8940f89b8611b02c6192650fad1dc5ffdcda66" gracePeriod=30 Sep 30 12:39:52 crc kubenswrapper[5002]: I0930 12:39:52.936673 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="71f3b49b-1ea5-4f00-b250-a62eb24bd7f5" containerName="ceilometer-notification-agent" containerID="cri-o://af6ddd5593a215979eafaa1aa8d3be9d13edce36adfe1953169fdfd29351d1b9" gracePeriod=30 Sep 30 12:39:52 crc kubenswrapper[5002]: I0930 12:39:52.936758 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="71f3b49b-1ea5-4f00-b250-a62eb24bd7f5" containerName="proxy-httpd" containerID="cri-o://6921a273fea651a9f299c26e6b72917ebcc691e284c348de4c53875890f0a86f" gracePeriod=30 Sep 30 12:39:52 crc kubenswrapper[5002]: I0930 12:39:52.936594 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="71f3b49b-1ea5-4f00-b250-a62eb24bd7f5" containerName="ceilometer-central-agent" containerID="cri-o://c7395488a9b0d10b6ea0867f42875667e77652f70b0e0597b6b1f4b95f2120cb" gracePeriod=30 Sep 30 12:39:53 crc kubenswrapper[5002]: I0930 12:39:53.211609 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Sep 30 12:39:53 crc kubenswrapper[5002]: I0930 12:39:53.242147 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0" Sep 30 12:39:53 crc kubenswrapper[5002]: I0930 12:39:53.593008 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-59cf4bdb65-sh88w" 
event={"ID":"c2244511-4d5b-4162-a0f3-e5c8e89781f1","Type":"ContainerStarted","Data":"7222b212b9650df669f53aa165030071b2723fbbd90068c50ac75f46ccbcc22c"} Sep 30 12:39:53 crc kubenswrapper[5002]: I0930 12:39:53.593425 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-59cf4bdb65-sh88w" Sep 30 12:39:53 crc kubenswrapper[5002]: I0930 12:39:53.607391 5002 generic.go:334] "Generic (PLEG): container finished" podID="71f3b49b-1ea5-4f00-b250-a62eb24bd7f5" containerID="6921a273fea651a9f299c26e6b72917ebcc691e284c348de4c53875890f0a86f" exitCode=0 Sep 30 12:39:53 crc kubenswrapper[5002]: I0930 12:39:53.607446 5002 generic.go:334] "Generic (PLEG): container finished" podID="71f3b49b-1ea5-4f00-b250-a62eb24bd7f5" containerID="4f6e2428e4297684592f737a3a8940f89b8611b02c6192650fad1dc5ffdcda66" exitCode=2 Sep 30 12:39:53 crc kubenswrapper[5002]: I0930 12:39:53.607461 5002 generic.go:334] "Generic (PLEG): container finished" podID="71f3b49b-1ea5-4f00-b250-a62eb24bd7f5" containerID="af6ddd5593a215979eafaa1aa8d3be9d13edce36adfe1953169fdfd29351d1b9" exitCode=0 Sep 30 12:39:53 crc kubenswrapper[5002]: I0930 12:39:53.607530 5002 generic.go:334] "Generic (PLEG): container finished" podID="71f3b49b-1ea5-4f00-b250-a62eb24bd7f5" containerID="c7395488a9b0d10b6ea0867f42875667e77652f70b0e0597b6b1f4b95f2120cb" exitCode=0 Sep 30 12:39:53 crc kubenswrapper[5002]: I0930 12:39:53.607710 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"71f3b49b-1ea5-4f00-b250-a62eb24bd7f5","Type":"ContainerDied","Data":"6921a273fea651a9f299c26e6b72917ebcc691e284c348de4c53875890f0a86f"} Sep 30 12:39:53 crc kubenswrapper[5002]: I0930 12:39:53.607772 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"71f3b49b-1ea5-4f00-b250-a62eb24bd7f5","Type":"ContainerDied","Data":"4f6e2428e4297684592f737a3a8940f89b8611b02c6192650fad1dc5ffdcda66"} Sep 30 12:39:53 crc kubenswrapper[5002]: I0930 12:39:53.607786 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"71f3b49b-1ea5-4f00-b250-a62eb24bd7f5","Type":"ContainerDied","Data":"af6ddd5593a215979eafaa1aa8d3be9d13edce36adfe1953169fdfd29351d1b9"} Sep 30 12:39:53 crc kubenswrapper[5002]: I0930 12:39:53.607797 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"71f3b49b-1ea5-4f00-b250-a62eb24bd7f5","Type":"ContainerDied","Data":"c7395488a9b0d10b6ea0867f42875667e77652f70b0e0597b6b1f4b95f2120cb"} Sep 30 12:39:53 crc kubenswrapper[5002]: I0930 12:39:53.607802 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="e6cd1979-da09-4b1b-afa5-b8a10085918c" containerName="nova-api-log" containerID="cri-o://af68c7c8f22bd4d0756157902404dcece595bc7791982f00a673cd24d69bb23f" gracePeriod=30 Sep 30 12:39:53 crc kubenswrapper[5002]: I0930 12:39:53.607878 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="e6cd1979-da09-4b1b-afa5-b8a10085918c" containerName="nova-api-api" containerID="cri-o://21360592d52e32cbc823f9b85082078f08331f6f682522de8ef6ea0e02afd5c8" gracePeriod=30 Sep 30 12:39:53 crc kubenswrapper[5002]: I0930 12:39:53.649113 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-59cf4bdb65-sh88w" podStartSLOduration=3.649089106 podStartE2EDuration="3.649089106s" podCreationTimestamp="2025-09-30 12:39:50 +0000 UTC" firstStartedPulling="0001-01-01 
00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 12:39:53.631582433 +0000 UTC m=+1167.881264599" watchObservedRunningTime="2025-09-30 12:39:53.649089106 +0000 UTC m=+1167.898771252" Sep 30 12:39:53 crc kubenswrapper[5002]: I0930 12:39:53.739747 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 30 12:39:53 crc kubenswrapper[5002]: I0930 12:39:53.899773 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/71f3b49b-1ea5-4f00-b250-a62eb24bd7f5-sg-core-conf-yaml\") pod \"71f3b49b-1ea5-4f00-b250-a62eb24bd7f5\" (UID: \"71f3b49b-1ea5-4f00-b250-a62eb24bd7f5\") " Sep 30 12:39:53 crc kubenswrapper[5002]: I0930 12:39:53.899969 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/71f3b49b-1ea5-4f00-b250-a62eb24bd7f5-combined-ca-bundle\") pod \"71f3b49b-1ea5-4f00-b250-a62eb24bd7f5\" (UID: \"71f3b49b-1ea5-4f00-b250-a62eb24bd7f5\") " Sep 30 12:39:53 crc kubenswrapper[5002]: I0930 12:39:53.900034 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/71f3b49b-1ea5-4f00-b250-a62eb24bd7f5-config-data\") pod \"71f3b49b-1ea5-4f00-b250-a62eb24bd7f5\" (UID: \"71f3b49b-1ea5-4f00-b250-a62eb24bd7f5\") " Sep 30 12:39:53 crc kubenswrapper[5002]: I0930 12:39:53.900065 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/71f3b49b-1ea5-4f00-b250-a62eb24bd7f5-scripts\") pod \"71f3b49b-1ea5-4f00-b250-a62eb24bd7f5\" (UID: \"71f3b49b-1ea5-4f00-b250-a62eb24bd7f5\") " Sep 30 12:39:53 crc kubenswrapper[5002]: I0930 12:39:53.900102 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/71f3b49b-1ea5-4f00-b250-a62eb24bd7f5-run-httpd\") pod \"71f3b49b-1ea5-4f00-b250-a62eb24bd7f5\" (UID: \"71f3b49b-1ea5-4f00-b250-a62eb24bd7f5\") " Sep 30 12:39:53 crc kubenswrapper[5002]: I0930 12:39:53.900130 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/71f3b49b-1ea5-4f00-b250-a62eb24bd7f5-log-httpd\") pod \"71f3b49b-1ea5-4f00-b250-a62eb24bd7f5\" (UID: \"71f3b49b-1ea5-4f00-b250-a62eb24bd7f5\") " Sep 30 12:39:53 crc kubenswrapper[5002]: I0930 12:39:53.900206 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wlb77\" (UniqueName: \"kubernetes.io/projected/71f3b49b-1ea5-4f00-b250-a62eb24bd7f5-kube-api-access-wlb77\") pod \"71f3b49b-1ea5-4f00-b250-a62eb24bd7f5\" (UID: \"71f3b49b-1ea5-4f00-b250-a62eb24bd7f5\") " Sep 30 12:39:53 crc kubenswrapper[5002]: I0930 12:39:53.900310 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/71f3b49b-1ea5-4f00-b250-a62eb24bd7f5-ceilometer-tls-certs\") pod \"71f3b49b-1ea5-4f00-b250-a62eb24bd7f5\" (UID: \"71f3b49b-1ea5-4f00-b250-a62eb24bd7f5\") " Sep 30 12:39:53 crc kubenswrapper[5002]: I0930 12:39:53.900761 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/71f3b49b-1ea5-4f00-b250-a62eb24bd7f5-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "71f3b49b-1ea5-4f00-b250-a62eb24bd7f5" (UID: 
"71f3b49b-1ea5-4f00-b250-a62eb24bd7f5"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 12:39:53 crc kubenswrapper[5002]: I0930 12:39:53.901116 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/71f3b49b-1ea5-4f00-b250-a62eb24bd7f5-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "71f3b49b-1ea5-4f00-b250-a62eb24bd7f5" (UID: "71f3b49b-1ea5-4f00-b250-a62eb24bd7f5"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 12:39:53 crc kubenswrapper[5002]: I0930 12:39:53.905641 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/71f3b49b-1ea5-4f00-b250-a62eb24bd7f5-kube-api-access-wlb77" (OuterVolumeSpecName: "kube-api-access-wlb77") pod "71f3b49b-1ea5-4f00-b250-a62eb24bd7f5" (UID: "71f3b49b-1ea5-4f00-b250-a62eb24bd7f5"). InnerVolumeSpecName "kube-api-access-wlb77". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 12:39:53 crc kubenswrapper[5002]: I0930 12:39:53.908632 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/71f3b49b-1ea5-4f00-b250-a62eb24bd7f5-scripts" (OuterVolumeSpecName: "scripts") pod "71f3b49b-1ea5-4f00-b250-a62eb24bd7f5" (UID: "71f3b49b-1ea5-4f00-b250-a62eb24bd7f5"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:39:53 crc kubenswrapper[5002]: I0930 12:39:53.958198 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/71f3b49b-1ea5-4f00-b250-a62eb24bd7f5-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "71f3b49b-1ea5-4f00-b250-a62eb24bd7f5" (UID: "71f3b49b-1ea5-4f00-b250-a62eb24bd7f5"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:39:53 crc kubenswrapper[5002]: I0930 12:39:53.975317 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/71f3b49b-1ea5-4f00-b250-a62eb24bd7f5-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "71f3b49b-1ea5-4f00-b250-a62eb24bd7f5" (UID: "71f3b49b-1ea5-4f00-b250-a62eb24bd7f5"). InnerVolumeSpecName "ceilometer-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:39:53 crc kubenswrapper[5002]: I0930 12:39:53.995571 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/71f3b49b-1ea5-4f00-b250-a62eb24bd7f5-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "71f3b49b-1ea5-4f00-b250-a62eb24bd7f5" (UID: "71f3b49b-1ea5-4f00-b250-a62eb24bd7f5"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:39:54 crc kubenswrapper[5002]: I0930 12:39:54.002059 5002 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/71f3b49b-1ea5-4f00-b250-a62eb24bd7f5-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 30 12:39:54 crc kubenswrapper[5002]: I0930 12:39:54.002089 5002 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/71f3b49b-1ea5-4f00-b250-a62eb24bd7f5-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Sep 30 12:39:54 crc kubenswrapper[5002]: I0930 12:39:54.002098 5002 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/71f3b49b-1ea5-4f00-b250-a62eb24bd7f5-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 12:39:54 crc kubenswrapper[5002]: I0930 12:39:54.002107 5002 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/71f3b49b-1ea5-4f00-b250-a62eb24bd7f5-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 12:39:54 crc kubenswrapper[5002]: I0930 12:39:54.002116 5002 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/71f3b49b-1ea5-4f00-b250-a62eb24bd7f5-run-httpd\") on node \"crc\" DevicePath \"\"" Sep 30 12:39:54 crc kubenswrapper[5002]: I0930 12:39:54.002124 5002 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/71f3b49b-1ea5-4f00-b250-a62eb24bd7f5-log-httpd\") on node \"crc\" DevicePath \"\"" Sep 30 12:39:54 crc kubenswrapper[5002]: I0930 12:39:54.002131 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wlb77\" (UniqueName: \"kubernetes.io/projected/71f3b49b-1ea5-4f00-b250-a62eb24bd7f5-kube-api-access-wlb77\") on node \"crc\" DevicePath \"\"" Sep 30 12:39:54 crc kubenswrapper[5002]: I0930 12:39:54.022323 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/71f3b49b-1ea5-4f00-b250-a62eb24bd7f5-config-data" (OuterVolumeSpecName: "config-data") pod "71f3b49b-1ea5-4f00-b250-a62eb24bd7f5" (UID: "71f3b49b-1ea5-4f00-b250-a62eb24bd7f5"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:39:54 crc kubenswrapper[5002]: I0930 12:39:54.104146 5002 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/71f3b49b-1ea5-4f00-b250-a62eb24bd7f5-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 12:39:54 crc kubenswrapper[5002]: I0930 12:39:54.619021 5002 generic.go:334] "Generic (PLEG): container finished" podID="e6cd1979-da09-4b1b-afa5-b8a10085918c" containerID="af68c7c8f22bd4d0756157902404dcece595bc7791982f00a673cd24d69bb23f" exitCode=143 Sep 30 12:39:54 crc kubenswrapper[5002]: I0930 12:39:54.619201 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"e6cd1979-da09-4b1b-afa5-b8a10085918c","Type":"ContainerDied","Data":"af68c7c8f22bd4d0756157902404dcece595bc7791982f00a673cd24d69bb23f"} Sep 30 12:39:54 crc kubenswrapper[5002]: I0930 12:39:54.622444 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"71f3b49b-1ea5-4f00-b250-a62eb24bd7f5","Type":"ContainerDied","Data":"7f04ab2429dafd756593b0278c156d2f326a7d893abbd04407ccfaa70e586a52"} Sep 30 12:39:54 crc kubenswrapper[5002]: I0930 12:39:54.622516 5002 scope.go:117] "RemoveContainer" containerID="6921a273fea651a9f299c26e6b72917ebcc691e284c348de4c53875890f0a86f" Sep 30 12:39:54 crc kubenswrapper[5002]: I0930 12:39:54.622580 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 30 12:39:54 crc kubenswrapper[5002]: I0930 12:39:54.670039 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Sep 30 12:39:54 crc kubenswrapper[5002]: I0930 12:39:54.686068 5002 scope.go:117] "RemoveContainer" containerID="4f6e2428e4297684592f737a3a8940f89b8611b02c6192650fad1dc5ffdcda66" Sep 30 12:39:54 crc kubenswrapper[5002]: I0930 12:39:54.706961 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Sep 30 12:39:54 crc kubenswrapper[5002]: I0930 12:39:54.707003 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Sep 30 12:39:54 crc kubenswrapper[5002]: E0930 12:39:54.707313 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="71f3b49b-1ea5-4f00-b250-a62eb24bd7f5" containerName="sg-core" Sep 30 12:39:54 crc kubenswrapper[5002]: I0930 12:39:54.707333 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="71f3b49b-1ea5-4f00-b250-a62eb24bd7f5" containerName="sg-core" Sep 30 12:39:54 crc kubenswrapper[5002]: E0930 12:39:54.707367 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="71f3b49b-1ea5-4f00-b250-a62eb24bd7f5" containerName="ceilometer-notification-agent" Sep 30 12:39:54 crc kubenswrapper[5002]: I0930 12:39:54.707376 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="71f3b49b-1ea5-4f00-b250-a62eb24bd7f5" containerName="ceilometer-notification-agent" Sep 30 12:39:54 crc kubenswrapper[5002]: E0930 12:39:54.707404 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="71f3b49b-1ea5-4f00-b250-a62eb24bd7f5" containerName="ceilometer-central-agent" Sep 30 12:39:54 crc kubenswrapper[5002]: I0930 12:39:54.707412 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="71f3b49b-1ea5-4f00-b250-a62eb24bd7f5" containerName="ceilometer-central-agent" Sep 30 12:39:54 crc kubenswrapper[5002]: E0930 12:39:54.707422 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="71f3b49b-1ea5-4f00-b250-a62eb24bd7f5" containerName="proxy-httpd" 
Sep 30 12:39:54 crc kubenswrapper[5002]: I0930 12:39:54.707429 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="71f3b49b-1ea5-4f00-b250-a62eb24bd7f5" containerName="proxy-httpd" Sep 30 12:39:54 crc kubenswrapper[5002]: I0930 12:39:54.707678 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="71f3b49b-1ea5-4f00-b250-a62eb24bd7f5" containerName="proxy-httpd" Sep 30 12:39:54 crc kubenswrapper[5002]: I0930 12:39:54.707702 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="71f3b49b-1ea5-4f00-b250-a62eb24bd7f5" containerName="ceilometer-notification-agent" Sep 30 12:39:54 crc kubenswrapper[5002]: I0930 12:39:54.707720 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="71f3b49b-1ea5-4f00-b250-a62eb24bd7f5" containerName="ceilometer-central-agent" Sep 30 12:39:54 crc kubenswrapper[5002]: I0930 12:39:54.707729 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="71f3b49b-1ea5-4f00-b250-a62eb24bd7f5" containerName="sg-core" Sep 30 12:39:54 crc kubenswrapper[5002]: I0930 12:39:54.709569 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 30 12:39:54 crc kubenswrapper[5002]: I0930 12:39:54.709669 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 30 12:39:54 crc kubenswrapper[5002]: I0930 12:39:54.712255 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Sep 30 12:39:54 crc kubenswrapper[5002]: I0930 12:39:54.712660 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Sep 30 12:39:54 crc kubenswrapper[5002]: I0930 12:39:54.712739 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Sep 30 12:39:54 crc kubenswrapper[5002]: I0930 12:39:54.736598 5002 scope.go:117] "RemoveContainer" containerID="af6ddd5593a215979eafaa1aa8d3be9d13edce36adfe1953169fdfd29351d1b9" Sep 30 12:39:54 crc kubenswrapper[5002]: I0930 12:39:54.759155 5002 scope.go:117] "RemoveContainer" containerID="c7395488a9b0d10b6ea0867f42875667e77652f70b0e0597b6b1f4b95f2120cb" Sep 30 12:39:54 crc kubenswrapper[5002]: I0930 12:39:54.816256 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/295b654f-3031-4e18-9d39-12fbc8e739a5-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"295b654f-3031-4e18-9d39-12fbc8e739a5\") " pod="openstack/ceilometer-0" Sep 30 12:39:54 crc kubenswrapper[5002]: I0930 12:39:54.816302 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/295b654f-3031-4e18-9d39-12fbc8e739a5-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"295b654f-3031-4e18-9d39-12fbc8e739a5\") " pod="openstack/ceilometer-0" Sep 30 12:39:54 crc kubenswrapper[5002]: I0930 12:39:54.816321 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/295b654f-3031-4e18-9d39-12fbc8e739a5-config-data\") pod \"ceilometer-0\" (UID: \"295b654f-3031-4e18-9d39-12fbc8e739a5\") " pod="openstack/ceilometer-0" Sep 30 12:39:54 crc kubenswrapper[5002]: I0930 12:39:54.816354 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: 
\"kubernetes.io/empty-dir/295b654f-3031-4e18-9d39-12fbc8e739a5-log-httpd\") pod \"ceilometer-0\" (UID: \"295b654f-3031-4e18-9d39-12fbc8e739a5\") " pod="openstack/ceilometer-0" Sep 30 12:39:54 crc kubenswrapper[5002]: I0930 12:39:54.816510 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/295b654f-3031-4e18-9d39-12fbc8e739a5-run-httpd\") pod \"ceilometer-0\" (UID: \"295b654f-3031-4e18-9d39-12fbc8e739a5\") " pod="openstack/ceilometer-0" Sep 30 12:39:54 crc kubenswrapper[5002]: I0930 12:39:54.816543 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/295b654f-3031-4e18-9d39-12fbc8e739a5-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"295b654f-3031-4e18-9d39-12fbc8e739a5\") " pod="openstack/ceilometer-0" Sep 30 12:39:54 crc kubenswrapper[5002]: I0930 12:39:54.816577 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/295b654f-3031-4e18-9d39-12fbc8e739a5-scripts\") pod \"ceilometer-0\" (UID: \"295b654f-3031-4e18-9d39-12fbc8e739a5\") " pod="openstack/ceilometer-0" Sep 30 12:39:54 crc kubenswrapper[5002]: I0930 12:39:54.816668 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9n8wx\" (UniqueName: \"kubernetes.io/projected/295b654f-3031-4e18-9d39-12fbc8e739a5-kube-api-access-9n8wx\") pod \"ceilometer-0\" (UID: \"295b654f-3031-4e18-9d39-12fbc8e739a5\") " pod="openstack/ceilometer-0" Sep 30 12:39:54 crc kubenswrapper[5002]: I0930 12:39:54.866427 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Sep 30 12:39:54 crc kubenswrapper[5002]: E0930 12:39:54.867118 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="unmounted volumes=[ceilometer-tls-certs combined-ca-bundle config-data kube-api-access-9n8wx log-httpd run-httpd scripts sg-core-conf-yaml], unattached volumes=[], failed to process volumes=[]: context canceled" pod="openstack/ceilometer-0" podUID="295b654f-3031-4e18-9d39-12fbc8e739a5" Sep 30 12:39:54 crc kubenswrapper[5002]: I0930 12:39:54.918022 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/295b654f-3031-4e18-9d39-12fbc8e739a5-run-httpd\") pod \"ceilometer-0\" (UID: \"295b654f-3031-4e18-9d39-12fbc8e739a5\") " pod="openstack/ceilometer-0" Sep 30 12:39:54 crc kubenswrapper[5002]: I0930 12:39:54.918387 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/295b654f-3031-4e18-9d39-12fbc8e739a5-run-httpd\") pod \"ceilometer-0\" (UID: \"295b654f-3031-4e18-9d39-12fbc8e739a5\") " pod="openstack/ceilometer-0" Sep 30 12:39:54 crc kubenswrapper[5002]: I0930 12:39:54.918686 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/295b654f-3031-4e18-9d39-12fbc8e739a5-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"295b654f-3031-4e18-9d39-12fbc8e739a5\") " pod="openstack/ceilometer-0" Sep 30 12:39:54 crc kubenswrapper[5002]: I0930 12:39:54.918730 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/295b654f-3031-4e18-9d39-12fbc8e739a5-scripts\") pod \"ceilometer-0\" (UID: 
\"295b654f-3031-4e18-9d39-12fbc8e739a5\") " pod="openstack/ceilometer-0" Sep 30 12:39:54 crc kubenswrapper[5002]: I0930 12:39:54.919547 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9n8wx\" (UniqueName: \"kubernetes.io/projected/295b654f-3031-4e18-9d39-12fbc8e739a5-kube-api-access-9n8wx\") pod \"ceilometer-0\" (UID: \"295b654f-3031-4e18-9d39-12fbc8e739a5\") " pod="openstack/ceilometer-0" Sep 30 12:39:54 crc kubenswrapper[5002]: I0930 12:39:54.919622 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/295b654f-3031-4e18-9d39-12fbc8e739a5-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"295b654f-3031-4e18-9d39-12fbc8e739a5\") " pod="openstack/ceilometer-0" Sep 30 12:39:54 crc kubenswrapper[5002]: I0930 12:39:54.919652 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/295b654f-3031-4e18-9d39-12fbc8e739a5-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"295b654f-3031-4e18-9d39-12fbc8e739a5\") " pod="openstack/ceilometer-0" Sep 30 12:39:54 crc kubenswrapper[5002]: I0930 12:39:54.919672 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/295b654f-3031-4e18-9d39-12fbc8e739a5-config-data\") pod \"ceilometer-0\" (UID: \"295b654f-3031-4e18-9d39-12fbc8e739a5\") " pod="openstack/ceilometer-0" Sep 30 12:39:54 crc kubenswrapper[5002]: I0930 12:39:54.919712 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/295b654f-3031-4e18-9d39-12fbc8e739a5-log-httpd\") pod \"ceilometer-0\" (UID: \"295b654f-3031-4e18-9d39-12fbc8e739a5\") " pod="openstack/ceilometer-0" Sep 30 12:39:54 crc kubenswrapper[5002]: I0930 12:39:54.920183 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/295b654f-3031-4e18-9d39-12fbc8e739a5-log-httpd\") pod \"ceilometer-0\" (UID: \"295b654f-3031-4e18-9d39-12fbc8e739a5\") " pod="openstack/ceilometer-0" Sep 30 12:39:54 crc kubenswrapper[5002]: I0930 12:39:54.924992 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/295b654f-3031-4e18-9d39-12fbc8e739a5-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"295b654f-3031-4e18-9d39-12fbc8e739a5\") " pod="openstack/ceilometer-0" Sep 30 12:39:54 crc kubenswrapper[5002]: I0930 12:39:54.925310 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/295b654f-3031-4e18-9d39-12fbc8e739a5-scripts\") pod \"ceilometer-0\" (UID: \"295b654f-3031-4e18-9d39-12fbc8e739a5\") " pod="openstack/ceilometer-0" Sep 30 12:39:54 crc kubenswrapper[5002]: I0930 12:39:54.925323 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/295b654f-3031-4e18-9d39-12fbc8e739a5-config-data\") pod \"ceilometer-0\" (UID: \"295b654f-3031-4e18-9d39-12fbc8e739a5\") " pod="openstack/ceilometer-0" Sep 30 12:39:54 crc kubenswrapper[5002]: I0930 12:39:54.925390 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/295b654f-3031-4e18-9d39-12fbc8e739a5-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"295b654f-3031-4e18-9d39-12fbc8e739a5\") " 
pod="openstack/ceilometer-0" Sep 30 12:39:54 crc kubenswrapper[5002]: I0930 12:39:54.925398 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/295b654f-3031-4e18-9d39-12fbc8e739a5-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"295b654f-3031-4e18-9d39-12fbc8e739a5\") " pod="openstack/ceilometer-0" Sep 30 12:39:54 crc kubenswrapper[5002]: I0930 12:39:54.940514 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9n8wx\" (UniqueName: \"kubernetes.io/projected/295b654f-3031-4e18-9d39-12fbc8e739a5-kube-api-access-9n8wx\") pod \"ceilometer-0\" (UID: \"295b654f-3031-4e18-9d39-12fbc8e739a5\") " pod="openstack/ceilometer-0" Sep 30 12:39:55 crc kubenswrapper[5002]: I0930 12:39:55.633651 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 30 12:39:55 crc kubenswrapper[5002]: I0930 12:39:55.648713 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 30 12:39:55 crc kubenswrapper[5002]: I0930 12:39:55.837533 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/295b654f-3031-4e18-9d39-12fbc8e739a5-scripts\") pod \"295b654f-3031-4e18-9d39-12fbc8e739a5\" (UID: \"295b654f-3031-4e18-9d39-12fbc8e739a5\") " Sep 30 12:39:55 crc kubenswrapper[5002]: I0930 12:39:55.837638 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/295b654f-3031-4e18-9d39-12fbc8e739a5-config-data\") pod \"295b654f-3031-4e18-9d39-12fbc8e739a5\" (UID: \"295b654f-3031-4e18-9d39-12fbc8e739a5\") " Sep 30 12:39:55 crc kubenswrapper[5002]: I0930 12:39:55.837696 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9n8wx\" (UniqueName: \"kubernetes.io/projected/295b654f-3031-4e18-9d39-12fbc8e739a5-kube-api-access-9n8wx\") pod \"295b654f-3031-4e18-9d39-12fbc8e739a5\" (UID: \"295b654f-3031-4e18-9d39-12fbc8e739a5\") " Sep 30 12:39:55 crc kubenswrapper[5002]: I0930 12:39:55.837761 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/295b654f-3031-4e18-9d39-12fbc8e739a5-run-httpd\") pod \"295b654f-3031-4e18-9d39-12fbc8e739a5\" (UID: \"295b654f-3031-4e18-9d39-12fbc8e739a5\") " Sep 30 12:39:55 crc kubenswrapper[5002]: I0930 12:39:55.837840 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/295b654f-3031-4e18-9d39-12fbc8e739a5-combined-ca-bundle\") pod \"295b654f-3031-4e18-9d39-12fbc8e739a5\" (UID: \"295b654f-3031-4e18-9d39-12fbc8e739a5\") " Sep 30 12:39:55 crc kubenswrapper[5002]: I0930 12:39:55.837877 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/295b654f-3031-4e18-9d39-12fbc8e739a5-log-httpd\") pod \"295b654f-3031-4e18-9d39-12fbc8e739a5\" (UID: \"295b654f-3031-4e18-9d39-12fbc8e739a5\") " Sep 30 12:39:55 crc kubenswrapper[5002]: I0930 12:39:55.837913 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/295b654f-3031-4e18-9d39-12fbc8e739a5-ceilometer-tls-certs\") pod \"295b654f-3031-4e18-9d39-12fbc8e739a5\" (UID: \"295b654f-3031-4e18-9d39-12fbc8e739a5\") " Sep 30 
12:39:55 crc kubenswrapper[5002]: I0930 12:39:55.837955 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/295b654f-3031-4e18-9d39-12fbc8e739a5-sg-core-conf-yaml\") pod \"295b654f-3031-4e18-9d39-12fbc8e739a5\" (UID: \"295b654f-3031-4e18-9d39-12fbc8e739a5\") " Sep 30 12:39:55 crc kubenswrapper[5002]: I0930 12:39:55.838086 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/295b654f-3031-4e18-9d39-12fbc8e739a5-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "295b654f-3031-4e18-9d39-12fbc8e739a5" (UID: "295b654f-3031-4e18-9d39-12fbc8e739a5"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 12:39:55 crc kubenswrapper[5002]: I0930 12:39:55.838173 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/295b654f-3031-4e18-9d39-12fbc8e739a5-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "295b654f-3031-4e18-9d39-12fbc8e739a5" (UID: "295b654f-3031-4e18-9d39-12fbc8e739a5"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 12:39:55 crc kubenswrapper[5002]: I0930 12:39:55.842845 5002 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/295b654f-3031-4e18-9d39-12fbc8e739a5-run-httpd\") on node \"crc\" DevicePath \"\"" Sep 30 12:39:55 crc kubenswrapper[5002]: I0930 12:39:55.842875 5002 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/295b654f-3031-4e18-9d39-12fbc8e739a5-log-httpd\") on node \"crc\" DevicePath \"\"" Sep 30 12:39:55 crc kubenswrapper[5002]: I0930 12:39:55.844234 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/295b654f-3031-4e18-9d39-12fbc8e739a5-config-data" (OuterVolumeSpecName: "config-data") pod "295b654f-3031-4e18-9d39-12fbc8e739a5" (UID: "295b654f-3031-4e18-9d39-12fbc8e739a5"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:39:55 crc kubenswrapper[5002]: I0930 12:39:55.844253 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/295b654f-3031-4e18-9d39-12fbc8e739a5-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "295b654f-3031-4e18-9d39-12fbc8e739a5" (UID: "295b654f-3031-4e18-9d39-12fbc8e739a5"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:39:55 crc kubenswrapper[5002]: I0930 12:39:55.844372 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/295b654f-3031-4e18-9d39-12fbc8e739a5-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "295b654f-3031-4e18-9d39-12fbc8e739a5" (UID: "295b654f-3031-4e18-9d39-12fbc8e739a5"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:39:55 crc kubenswrapper[5002]: I0930 12:39:55.844522 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/295b654f-3031-4e18-9d39-12fbc8e739a5-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "295b654f-3031-4e18-9d39-12fbc8e739a5" (UID: "295b654f-3031-4e18-9d39-12fbc8e739a5"). InnerVolumeSpecName "ceilometer-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:39:55 crc kubenswrapper[5002]: I0930 12:39:55.859224 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/295b654f-3031-4e18-9d39-12fbc8e739a5-kube-api-access-9n8wx" (OuterVolumeSpecName: "kube-api-access-9n8wx") pod "295b654f-3031-4e18-9d39-12fbc8e739a5" (UID: "295b654f-3031-4e18-9d39-12fbc8e739a5"). InnerVolumeSpecName "kube-api-access-9n8wx". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 12:39:55 crc kubenswrapper[5002]: I0930 12:39:55.861898 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/295b654f-3031-4e18-9d39-12fbc8e739a5-scripts" (OuterVolumeSpecName: "scripts") pod "295b654f-3031-4e18-9d39-12fbc8e739a5" (UID: "295b654f-3031-4e18-9d39-12fbc8e739a5"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:39:55 crc kubenswrapper[5002]: I0930 12:39:55.945257 5002 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/295b654f-3031-4e18-9d39-12fbc8e739a5-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 12:39:55 crc kubenswrapper[5002]: I0930 12:39:55.945611 5002 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/295b654f-3031-4e18-9d39-12fbc8e739a5-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 12:39:55 crc kubenswrapper[5002]: I0930 12:39:55.945629 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9n8wx\" (UniqueName: \"kubernetes.io/projected/295b654f-3031-4e18-9d39-12fbc8e739a5-kube-api-access-9n8wx\") on node \"crc\" DevicePath \"\"" Sep 30 12:39:55 crc kubenswrapper[5002]: I0930 12:39:55.945642 5002 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/295b654f-3031-4e18-9d39-12fbc8e739a5-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 12:39:55 crc kubenswrapper[5002]: I0930 12:39:55.945654 5002 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/295b654f-3031-4e18-9d39-12fbc8e739a5-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 30 12:39:55 crc kubenswrapper[5002]: I0930 12:39:55.945667 5002 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/295b654f-3031-4e18-9d39-12fbc8e739a5-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Sep 30 12:39:56 crc kubenswrapper[5002]: I0930 12:39:56.646438 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 30 12:39:56 crc kubenswrapper[5002]: I0930 12:39:56.709288 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="71f3b49b-1ea5-4f00-b250-a62eb24bd7f5" path="/var/lib/kubelet/pods/71f3b49b-1ea5-4f00-b250-a62eb24bd7f5/volumes" Sep 30 12:39:56 crc kubenswrapper[5002]: I0930 12:39:56.728573 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Sep 30 12:39:56 crc kubenswrapper[5002]: I0930 12:39:56.742851 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Sep 30 12:39:56 crc kubenswrapper[5002]: I0930 12:39:56.755752 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Sep 30 12:39:56 crc kubenswrapper[5002]: I0930 12:39:56.757843 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Sep 30 12:39:56 crc kubenswrapper[5002]: I0930 12:39:56.760703 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Sep 30 12:39:56 crc kubenswrapper[5002]: I0930 12:39:56.761204 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Sep 30 12:39:56 crc kubenswrapper[5002]: I0930 12:39:56.762089 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Sep 30 12:39:56 crc kubenswrapper[5002]: I0930 12:39:56.775175 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 30 12:39:56 crc kubenswrapper[5002]: I0930 12:39:56.865998 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/85c8795c-0a51-4d53-a20d-b0b96d217d93-run-httpd\") pod \"ceilometer-0\" (UID: \"85c8795c-0a51-4d53-a20d-b0b96d217d93\") " pod="openstack/ceilometer-0" Sep 30 12:39:56 crc kubenswrapper[5002]: I0930 12:39:56.866065 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/85c8795c-0a51-4d53-a20d-b0b96d217d93-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"85c8795c-0a51-4d53-a20d-b0b96d217d93\") " pod="openstack/ceilometer-0" Sep 30 12:39:56 crc kubenswrapper[5002]: I0930 12:39:56.866089 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/85c8795c-0a51-4d53-a20d-b0b96d217d93-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"85c8795c-0a51-4d53-a20d-b0b96d217d93\") " pod="openstack/ceilometer-0" Sep 30 12:39:56 crc kubenswrapper[5002]: I0930 12:39:56.866245 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/85c8795c-0a51-4d53-a20d-b0b96d217d93-scripts\") pod \"ceilometer-0\" (UID: \"85c8795c-0a51-4d53-a20d-b0b96d217d93\") " pod="openstack/ceilometer-0" Sep 30 12:39:56 crc kubenswrapper[5002]: I0930 12:39:56.866290 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4k69l\" (UniqueName: \"kubernetes.io/projected/85c8795c-0a51-4d53-a20d-b0b96d217d93-kube-api-access-4k69l\") pod \"ceilometer-0\" (UID: \"85c8795c-0a51-4d53-a20d-b0b96d217d93\") " pod="openstack/ceilometer-0" Sep 30 12:39:56 crc kubenswrapper[5002]: I0930 12:39:56.866411 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/85c8795c-0a51-4d53-a20d-b0b96d217d93-log-httpd\") pod \"ceilometer-0\" (UID: \"85c8795c-0a51-4d53-a20d-b0b96d217d93\") " pod="openstack/ceilometer-0" Sep 30 12:39:56 crc kubenswrapper[5002]: I0930 12:39:56.866527 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/85c8795c-0a51-4d53-a20d-b0b96d217d93-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"85c8795c-0a51-4d53-a20d-b0b96d217d93\") " pod="openstack/ceilometer-0" Sep 30 12:39:56 crc kubenswrapper[5002]: I0930 12:39:56.866649 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/85c8795c-0a51-4d53-a20d-b0b96d217d93-config-data\") pod \"ceilometer-0\" (UID: \"85c8795c-0a51-4d53-a20d-b0b96d217d93\") " pod="openstack/ceilometer-0" Sep 30 12:39:56 crc kubenswrapper[5002]: I0930 12:39:56.967832 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/85c8795c-0a51-4d53-a20d-b0b96d217d93-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"85c8795c-0a51-4d53-a20d-b0b96d217d93\") " pod="openstack/ceilometer-0" Sep 30 12:39:56 crc kubenswrapper[5002]: I0930 12:39:56.968185 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/85c8795c-0a51-4d53-a20d-b0b96d217d93-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"85c8795c-0a51-4d53-a20d-b0b96d217d93\") " pod="openstack/ceilometer-0" Sep 30 12:39:56 crc kubenswrapper[5002]: I0930 12:39:56.968233 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/85c8795c-0a51-4d53-a20d-b0b96d217d93-scripts\") pod \"ceilometer-0\" (UID: \"85c8795c-0a51-4d53-a20d-b0b96d217d93\") " pod="openstack/ceilometer-0" Sep 30 12:39:56 crc kubenswrapper[5002]: I0930 12:39:56.968250 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4k69l\" (UniqueName: \"kubernetes.io/projected/85c8795c-0a51-4d53-a20d-b0b96d217d93-kube-api-access-4k69l\") pod \"ceilometer-0\" (UID: \"85c8795c-0a51-4d53-a20d-b0b96d217d93\") " pod="openstack/ceilometer-0" Sep 30 12:39:56 crc kubenswrapper[5002]: I0930 12:39:56.968279 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/85c8795c-0a51-4d53-a20d-b0b96d217d93-log-httpd\") pod \"ceilometer-0\" (UID: \"85c8795c-0a51-4d53-a20d-b0b96d217d93\") " pod="openstack/ceilometer-0" Sep 30 12:39:56 crc kubenswrapper[5002]: I0930 12:39:56.968297 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/85c8795c-0a51-4d53-a20d-b0b96d217d93-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"85c8795c-0a51-4d53-a20d-b0b96d217d93\") " pod="openstack/ceilometer-0" Sep 30 12:39:56 crc kubenswrapper[5002]: I0930 12:39:56.968522 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/85c8795c-0a51-4d53-a20d-b0b96d217d93-config-data\") pod \"ceilometer-0\" (UID: \"85c8795c-0a51-4d53-a20d-b0b96d217d93\") " pod="openstack/ceilometer-0" Sep 30 12:39:56 crc kubenswrapper[5002]: I0930 12:39:56.968887 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/85c8795c-0a51-4d53-a20d-b0b96d217d93-log-httpd\") pod \"ceilometer-0\" (UID: \"85c8795c-0a51-4d53-a20d-b0b96d217d93\") " pod="openstack/ceilometer-0" Sep 30 12:39:56 crc kubenswrapper[5002]: I0930 12:39:56.969058 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/85c8795c-0a51-4d53-a20d-b0b96d217d93-run-httpd\") pod \"ceilometer-0\" (UID: \"85c8795c-0a51-4d53-a20d-b0b96d217d93\") " pod="openstack/ceilometer-0" Sep 30 12:39:56 crc kubenswrapper[5002]: I0930 12:39:56.969316 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: 
\"kubernetes.io/empty-dir/85c8795c-0a51-4d53-a20d-b0b96d217d93-run-httpd\") pod \"ceilometer-0\" (UID: \"85c8795c-0a51-4d53-a20d-b0b96d217d93\") " pod="openstack/ceilometer-0" Sep 30 12:39:56 crc kubenswrapper[5002]: I0930 12:39:56.972933 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/85c8795c-0a51-4d53-a20d-b0b96d217d93-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"85c8795c-0a51-4d53-a20d-b0b96d217d93\") " pod="openstack/ceilometer-0" Sep 30 12:39:56 crc kubenswrapper[5002]: I0930 12:39:56.973271 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/85c8795c-0a51-4d53-a20d-b0b96d217d93-config-data\") pod \"ceilometer-0\" (UID: \"85c8795c-0a51-4d53-a20d-b0b96d217d93\") " pod="openstack/ceilometer-0" Sep 30 12:39:56 crc kubenswrapper[5002]: I0930 12:39:56.973657 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/85c8795c-0a51-4d53-a20d-b0b96d217d93-scripts\") pod \"ceilometer-0\" (UID: \"85c8795c-0a51-4d53-a20d-b0b96d217d93\") " pod="openstack/ceilometer-0" Sep 30 12:39:56 crc kubenswrapper[5002]: I0930 12:39:56.973757 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/85c8795c-0a51-4d53-a20d-b0b96d217d93-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"85c8795c-0a51-4d53-a20d-b0b96d217d93\") " pod="openstack/ceilometer-0" Sep 30 12:39:57 crc kubenswrapper[5002]: I0930 12:39:57.000084 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4k69l\" (UniqueName: \"kubernetes.io/projected/85c8795c-0a51-4d53-a20d-b0b96d217d93-kube-api-access-4k69l\") pod \"ceilometer-0\" (UID: \"85c8795c-0a51-4d53-a20d-b0b96d217d93\") " pod="openstack/ceilometer-0" Sep 30 12:39:57 crc kubenswrapper[5002]: I0930 12:39:57.000392 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/85c8795c-0a51-4d53-a20d-b0b96d217d93-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"85c8795c-0a51-4d53-a20d-b0b96d217d93\") " pod="openstack/ceilometer-0" Sep 30 12:39:57 crc kubenswrapper[5002]: I0930 12:39:57.089185 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 30 12:39:57 crc kubenswrapper[5002]: I0930 12:39:57.204394 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0"
Sep 30 12:39:57 crc kubenswrapper[5002]: I0930 12:39:57.274454 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e6cd1979-da09-4b1b-afa5-b8a10085918c-config-data\") pod \"e6cd1979-da09-4b1b-afa5-b8a10085918c\" (UID: \"e6cd1979-da09-4b1b-afa5-b8a10085918c\") "
Sep 30 12:39:57 crc kubenswrapper[5002]: I0930 12:39:57.274584 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e6cd1979-da09-4b1b-afa5-b8a10085918c-combined-ca-bundle\") pod \"e6cd1979-da09-4b1b-afa5-b8a10085918c\" (UID: \"e6cd1979-da09-4b1b-afa5-b8a10085918c\") "
Sep 30 12:39:57 crc kubenswrapper[5002]: I0930 12:39:57.274678 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e6cd1979-da09-4b1b-afa5-b8a10085918c-logs\") pod \"e6cd1979-da09-4b1b-afa5-b8a10085918c\" (UID: \"e6cd1979-da09-4b1b-afa5-b8a10085918c\") "
Sep 30 12:39:57 crc kubenswrapper[5002]: I0930 12:39:57.274741 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hbktc\" (UniqueName: \"kubernetes.io/projected/e6cd1979-da09-4b1b-afa5-b8a10085918c-kube-api-access-hbktc\") pod \"e6cd1979-da09-4b1b-afa5-b8a10085918c\" (UID: \"e6cd1979-da09-4b1b-afa5-b8a10085918c\") "
Sep 30 12:39:57 crc kubenswrapper[5002]: I0930 12:39:57.277608 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e6cd1979-da09-4b1b-afa5-b8a10085918c-logs" (OuterVolumeSpecName: "logs") pod "e6cd1979-da09-4b1b-afa5-b8a10085918c" (UID: "e6cd1979-da09-4b1b-afa5-b8a10085918c"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 30 12:39:57 crc kubenswrapper[5002]: I0930 12:39:57.280890 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e6cd1979-da09-4b1b-afa5-b8a10085918c-kube-api-access-hbktc" (OuterVolumeSpecName: "kube-api-access-hbktc") pod "e6cd1979-da09-4b1b-afa5-b8a10085918c" (UID: "e6cd1979-da09-4b1b-afa5-b8a10085918c"). InnerVolumeSpecName "kube-api-access-hbktc". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 12:39:57 crc kubenswrapper[5002]: I0930 12:39:57.308556 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e6cd1979-da09-4b1b-afa5-b8a10085918c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e6cd1979-da09-4b1b-afa5-b8a10085918c" (UID: "e6cd1979-da09-4b1b-afa5-b8a10085918c"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 12:39:57 crc kubenswrapper[5002]: I0930 12:39:57.340283 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e6cd1979-da09-4b1b-afa5-b8a10085918c-config-data" (OuterVolumeSpecName: "config-data") pod "e6cd1979-da09-4b1b-afa5-b8a10085918c" (UID: "e6cd1979-da09-4b1b-afa5-b8a10085918c"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 12:39:57 crc kubenswrapper[5002]: I0930 12:39:57.376815 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hbktc\" (UniqueName: \"kubernetes.io/projected/e6cd1979-da09-4b1b-afa5-b8a10085918c-kube-api-access-hbktc\") on node \"crc\" DevicePath \"\""
Sep 30 12:39:57 crc kubenswrapper[5002]: I0930 12:39:57.376856 5002 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e6cd1979-da09-4b1b-afa5-b8a10085918c-config-data\") on node \"crc\" DevicePath \"\""
Sep 30 12:39:57 crc kubenswrapper[5002]: I0930 12:39:57.376867 5002 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e6cd1979-da09-4b1b-afa5-b8a10085918c-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Sep 30 12:39:57 crc kubenswrapper[5002]: I0930 12:39:57.376878 5002 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e6cd1979-da09-4b1b-afa5-b8a10085918c-logs\") on node \"crc\" DevicePath \"\""
Sep 30 12:39:57 crc kubenswrapper[5002]: I0930 12:39:57.531987 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"]
Sep 30 12:39:57 crc kubenswrapper[5002]: W0930 12:39:57.539701 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod85c8795c_0a51_4d53_a20d_b0b96d217d93.slice/crio-90f327abc73e6feb8af77ac212816ad47decffee0cfe5faa41a9f8bcbe190173 WatchSource:0}: Error finding container 90f327abc73e6feb8af77ac212816ad47decffee0cfe5faa41a9f8bcbe190173: Status 404 returned error can't find the container with id 90f327abc73e6feb8af77ac212816ad47decffee0cfe5faa41a9f8bcbe190173
Sep 30 12:39:57 crc kubenswrapper[5002]: I0930 12:39:57.655028 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"85c8795c-0a51-4d53-a20d-b0b96d217d93","Type":"ContainerStarted","Data":"90f327abc73e6feb8af77ac212816ad47decffee0cfe5faa41a9f8bcbe190173"}
Sep 30 12:39:57 crc kubenswrapper[5002]: I0930 12:39:57.657277 5002 generic.go:334] "Generic (PLEG): container finished" podID="e6cd1979-da09-4b1b-afa5-b8a10085918c" containerID="21360592d52e32cbc823f9b85082078f08331f6f682522de8ef6ea0e02afd5c8" exitCode=0
Sep 30 12:39:57 crc kubenswrapper[5002]: I0930 12:39:57.657318 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"e6cd1979-da09-4b1b-afa5-b8a10085918c","Type":"ContainerDied","Data":"21360592d52e32cbc823f9b85082078f08331f6f682522de8ef6ea0e02afd5c8"}
Sep 30 12:39:57 crc kubenswrapper[5002]: I0930 12:39:57.657344 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"e6cd1979-da09-4b1b-afa5-b8a10085918c","Type":"ContainerDied","Data":"ad7f28d285b17fac9f094e9fa038ecaf959217c66931dbedc3d0aae72e2e47e7"}
Sep 30 12:39:57 crc kubenswrapper[5002]: I0930 12:39:57.657360 5002 scope.go:117] "RemoveContainer" containerID="21360592d52e32cbc823f9b85082078f08331f6f682522de8ef6ea0e02afd5c8"
Sep 30 12:39:57 crc kubenswrapper[5002]: I0930 12:39:57.657490 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Sep 30 12:39:57 crc kubenswrapper[5002]: I0930 12:39:57.687203 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"]
Sep 30 12:39:57 crc kubenswrapper[5002]: I0930 12:39:57.697380 5002 scope.go:117] "RemoveContainer" containerID="af68c7c8f22bd4d0756157902404dcece595bc7791982f00a673cd24d69bb23f"
Sep 30 12:39:57 crc kubenswrapper[5002]: I0930 12:39:57.704464 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"]
Sep 30 12:39:57 crc kubenswrapper[5002]: I0930 12:39:57.720077 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"]
Sep 30 12:39:57 crc kubenswrapper[5002]: E0930 12:39:57.720498 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e6cd1979-da09-4b1b-afa5-b8a10085918c" containerName="nova-api-api"
Sep 30 12:39:57 crc kubenswrapper[5002]: I0930 12:39:57.720511 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="e6cd1979-da09-4b1b-afa5-b8a10085918c" containerName="nova-api-api"
Sep 30 12:39:57 crc kubenswrapper[5002]: E0930 12:39:57.720539 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e6cd1979-da09-4b1b-afa5-b8a10085918c" containerName="nova-api-log"
Sep 30 12:39:57 crc kubenswrapper[5002]: I0930 12:39:57.720545 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="e6cd1979-da09-4b1b-afa5-b8a10085918c" containerName="nova-api-log"
Sep 30 12:39:57 crc kubenswrapper[5002]: I0930 12:39:57.720709 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="e6cd1979-da09-4b1b-afa5-b8a10085918c" containerName="nova-api-api"
Sep 30 12:39:57 crc kubenswrapper[5002]: I0930 12:39:57.720730 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="e6cd1979-da09-4b1b-afa5-b8a10085918c" containerName="nova-api-log"
Sep 30 12:39:57 crc kubenswrapper[5002]: I0930 12:39:57.721664 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Sep 30 12:39:57 crc kubenswrapper[5002]: I0930 12:39:57.724033 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-internal-svc"
Sep 30 12:39:57 crc kubenswrapper[5002]: I0930 12:39:57.730885 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"]
Sep 30 12:39:57 crc kubenswrapper[5002]: I0930 12:39:57.733350 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data"
Sep 30 12:39:57 crc kubenswrapper[5002]: I0930 12:39:57.733589 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-public-svc"
Sep 30 12:39:57 crc kubenswrapper[5002]: I0930 12:39:57.742449 5002 scope.go:117] "RemoveContainer" containerID="21360592d52e32cbc823f9b85082078f08331f6f682522de8ef6ea0e02afd5c8"
Sep 30 12:39:57 crc kubenswrapper[5002]: E0930 12:39:57.749978 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"21360592d52e32cbc823f9b85082078f08331f6f682522de8ef6ea0e02afd5c8\": container with ID starting with 21360592d52e32cbc823f9b85082078f08331f6f682522de8ef6ea0e02afd5c8 not found: ID does not exist" containerID="21360592d52e32cbc823f9b85082078f08331f6f682522de8ef6ea0e02afd5c8"
Sep 30 12:39:57 crc kubenswrapper[5002]: I0930 12:39:57.750029 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"21360592d52e32cbc823f9b85082078f08331f6f682522de8ef6ea0e02afd5c8"} err="failed to get container status \"21360592d52e32cbc823f9b85082078f08331f6f682522de8ef6ea0e02afd5c8\": rpc error: code = NotFound desc = could not find container \"21360592d52e32cbc823f9b85082078f08331f6f682522de8ef6ea0e02afd5c8\": container with ID starting with 21360592d52e32cbc823f9b85082078f08331f6f682522de8ef6ea0e02afd5c8 not found: ID does not exist"
Sep 30 12:39:57 crc kubenswrapper[5002]: I0930 12:39:57.750162 5002 scope.go:117] "RemoveContainer" containerID="af68c7c8f22bd4d0756157902404dcece595bc7791982f00a673cd24d69bb23f"
Sep 30 12:39:57 crc kubenswrapper[5002]: E0930 12:39:57.750801 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"af68c7c8f22bd4d0756157902404dcece595bc7791982f00a673cd24d69bb23f\": container with ID starting with af68c7c8f22bd4d0756157902404dcece595bc7791982f00a673cd24d69bb23f not found: ID does not exist" containerID="af68c7c8f22bd4d0756157902404dcece595bc7791982f00a673cd24d69bb23f"
Sep 30 12:39:57 crc kubenswrapper[5002]: I0930 12:39:57.750846 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"af68c7c8f22bd4d0756157902404dcece595bc7791982f00a673cd24d69bb23f"} err="failed to get container status \"af68c7c8f22bd4d0756157902404dcece595bc7791982f00a673cd24d69bb23f\": rpc error: code = NotFound desc = could not find container \"af68c7c8f22bd4d0756157902404dcece595bc7791982f00a673cd24d69bb23f\": container with ID starting with af68c7c8f22bd4d0756157902404dcece595bc7791982f00a673cd24d69bb23f not found: ID does not exist"
Sep 30 12:39:57 crc kubenswrapper[5002]: I0930 12:39:57.796049 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d0d8b305-6208-4a2d-b131-62d912765c52-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"d0d8b305-6208-4a2d-b131-62d912765c52\") " pod="openstack/nova-api-0"
Sep 30 12:39:57 crc kubenswrapper[5002]: I0930 12:39:57.796101 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d0d8b305-6208-4a2d-b131-62d912765c52-logs\") pod \"nova-api-0\" (UID: \"d0d8b305-6208-4a2d-b131-62d912765c52\") " pod="openstack/nova-api-0"
Sep 30 12:39:57 crc kubenswrapper[5002]: I0930 12:39:57.796301 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vzrc4\" (UniqueName: \"kubernetes.io/projected/d0d8b305-6208-4a2d-b131-62d912765c52-kube-api-access-vzrc4\") pod \"nova-api-0\" (UID: \"d0d8b305-6208-4a2d-b131-62d912765c52\") " pod="openstack/nova-api-0"
Sep 30 12:39:57 crc kubenswrapper[5002]: I0930 12:39:57.796390 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/d0d8b305-6208-4a2d-b131-62d912765c52-internal-tls-certs\") pod \"nova-api-0\" (UID: \"d0d8b305-6208-4a2d-b131-62d912765c52\") " pod="openstack/nova-api-0"
Sep 30 12:39:57 crc kubenswrapper[5002]: I0930 12:39:57.796565 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d0d8b305-6208-4a2d-b131-62d912765c52-config-data\") pod \"nova-api-0\" (UID: \"d0d8b305-6208-4a2d-b131-62d912765c52\") " pod="openstack/nova-api-0"
Sep 30 12:39:57 crc kubenswrapper[5002]: I0930 12:39:57.796647 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/d0d8b305-6208-4a2d-b131-62d912765c52-public-tls-certs\") pod \"nova-api-0\" (UID: \"d0d8b305-6208-4a2d-b131-62d912765c52\") " pod="openstack/nova-api-0"
Sep 30 12:39:57 crc kubenswrapper[5002]: I0930 12:39:57.898919 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d0d8b305-6208-4a2d-b131-62d912765c52-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"d0d8b305-6208-4a2d-b131-62d912765c52\") " pod="openstack/nova-api-0"
Sep 30 12:39:57 crc kubenswrapper[5002]: I0930 12:39:57.899197 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d0d8b305-6208-4a2d-b131-62d912765c52-logs\") pod \"nova-api-0\" (UID: \"d0d8b305-6208-4a2d-b131-62d912765c52\") " pod="openstack/nova-api-0"
Sep 30 12:39:57 crc kubenswrapper[5002]: I0930 12:39:57.899358 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vzrc4\" (UniqueName: \"kubernetes.io/projected/d0d8b305-6208-4a2d-b131-62d912765c52-kube-api-access-vzrc4\") pod \"nova-api-0\" (UID: \"d0d8b305-6208-4a2d-b131-62d912765c52\") " pod="openstack/nova-api-0"
Sep 30 12:39:57 crc kubenswrapper[5002]: I0930 12:39:57.899542 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/d0d8b305-6208-4a2d-b131-62d912765c52-internal-tls-certs\") pod \"nova-api-0\" (UID: \"d0d8b305-6208-4a2d-b131-62d912765c52\") " pod="openstack/nova-api-0"
Sep 30 12:39:57 crc kubenswrapper[5002]: I0930 12:39:57.899651 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d0d8b305-6208-4a2d-b131-62d912765c52-config-data\") pod \"nova-api-0\" (UID: \"d0d8b305-6208-4a2d-b131-62d912765c52\") " pod="openstack/nova-api-0"
Sep 30 12:39:57 crc kubenswrapper[5002]: I0930 12:39:57.899745 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/d0d8b305-6208-4a2d-b131-62d912765c52-public-tls-certs\") pod \"nova-api-0\" (UID: \"d0d8b305-6208-4a2d-b131-62d912765c52\") " pod="openstack/nova-api-0"
Sep 30 12:39:57 crc kubenswrapper[5002]: I0930 12:39:57.899678 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d0d8b305-6208-4a2d-b131-62d912765c52-logs\") pod \"nova-api-0\" (UID: \"d0d8b305-6208-4a2d-b131-62d912765c52\") " pod="openstack/nova-api-0"
Sep 30 12:39:57 crc kubenswrapper[5002]: I0930 12:39:57.903693 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d0d8b305-6208-4a2d-b131-62d912765c52-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"d0d8b305-6208-4a2d-b131-62d912765c52\") " pod="openstack/nova-api-0"
Sep 30 12:39:57 crc kubenswrapper[5002]: I0930 12:39:57.904771 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/d0d8b305-6208-4a2d-b131-62d912765c52-public-tls-certs\") pod \"nova-api-0\" (UID: \"d0d8b305-6208-4a2d-b131-62d912765c52\") " pod="openstack/nova-api-0"
Sep 30 12:39:57 crc kubenswrapper[5002]: I0930 12:39:57.904784 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/d0d8b305-6208-4a2d-b131-62d912765c52-internal-tls-certs\") pod \"nova-api-0\" (UID: \"d0d8b305-6208-4a2d-b131-62d912765c52\") " pod="openstack/nova-api-0"
Sep 30 12:39:57 crc kubenswrapper[5002]: I0930 12:39:57.905134 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d0d8b305-6208-4a2d-b131-62d912765c52-config-data\") pod \"nova-api-0\" (UID: \"d0d8b305-6208-4a2d-b131-62d912765c52\") " pod="openstack/nova-api-0"
Sep 30 12:39:57 crc kubenswrapper[5002]: I0930 12:39:57.924966 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vzrc4\" (UniqueName: \"kubernetes.io/projected/d0d8b305-6208-4a2d-b131-62d912765c52-kube-api-access-vzrc4\") pod \"nova-api-0\" (UID: \"d0d8b305-6208-4a2d-b131-62d912765c52\") " pod="openstack/nova-api-0"
Sep 30 12:39:58 crc kubenswrapper[5002]: I0930 12:39:58.051538 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Sep 30 12:39:58 crc kubenswrapper[5002]: I0930 12:39:58.242317 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-cell1-novncproxy-0"
Sep 30 12:39:58 crc kubenswrapper[5002]: I0930 12:39:58.264169 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-cell1-novncproxy-0"
Sep 30 12:39:58 crc kubenswrapper[5002]: I0930 12:39:58.511228 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"]
Sep 30 12:39:58 crc kubenswrapper[5002]: W0930 12:39:58.520254 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd0d8b305_6208_4a2d_b131_62d912765c52.slice/crio-813fd2f00dc0f680873425a9d91c4a1550cf78cb071acbb78df63d9757aa4a8e WatchSource:0}: Error finding container 813fd2f00dc0f680873425a9d91c4a1550cf78cb071acbb78df63d9757aa4a8e: Status 404 returned error can't find the container with id 813fd2f00dc0f680873425a9d91c4a1550cf78cb071acbb78df63d9757aa4a8e
Sep 30 12:39:58 crc kubenswrapper[5002]: I0930 12:39:58.695526 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="295b654f-3031-4e18-9d39-12fbc8e739a5" path="/var/lib/kubelet/pods/295b654f-3031-4e18-9d39-12fbc8e739a5/volumes"
Sep 30 12:39:58 crc kubenswrapper[5002]: I0930 12:39:58.699271 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e6cd1979-da09-4b1b-afa5-b8a10085918c" path="/var/lib/kubelet/pods/e6cd1979-da09-4b1b-afa5-b8a10085918c/volumes"
Sep 30 12:39:58 crc kubenswrapper[5002]: I0930 12:39:58.700400 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"85c8795c-0a51-4d53-a20d-b0b96d217d93","Type":"ContainerStarted","Data":"ccba853345406bddabf2c759075829791979a5c18ffe61e8a2f7afd6d7211021"}
Sep 30 12:39:58 crc kubenswrapper[5002]: I0930 12:39:58.700661 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"d0d8b305-6208-4a2d-b131-62d912765c52","Type":"ContainerStarted","Data":"813fd2f00dc0f680873425a9d91c4a1550cf78cb071acbb78df63d9757aa4a8e"}
Sep 30 12:39:58 crc kubenswrapper[5002]: I0930 12:39:58.709095 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-novncproxy-0"
Sep 30 12:39:58 crc kubenswrapper[5002]: I0930 12:39:58.937061 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-cell-mapping-5rtsz"]
Sep 30 12:39:58 crc kubenswrapper[5002]: I0930 12:39:58.938987 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-5rtsz"
Sep 30 12:39:58 crc kubenswrapper[5002]: I0930 12:39:58.942885 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-config-data"
Sep 30 12:39:58 crc kubenswrapper[5002]: I0930 12:39:58.943079 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-scripts"
Sep 30 12:39:58 crc kubenswrapper[5002]: I0930 12:39:58.946241 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-5rtsz"]
Sep 30 12:39:59 crc kubenswrapper[5002]: I0930 12:39:59.027429 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/df974acb-ce3b-47d1-a5f5-a6f5ad8c95c6-scripts\") pod \"nova-cell1-cell-mapping-5rtsz\" (UID: \"df974acb-ce3b-47d1-a5f5-a6f5ad8c95c6\") " pod="openstack/nova-cell1-cell-mapping-5rtsz"
Sep 30 12:39:59 crc kubenswrapper[5002]: I0930 12:39:59.027501 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/df974acb-ce3b-47d1-a5f5-a6f5ad8c95c6-config-data\") pod \"nova-cell1-cell-mapping-5rtsz\" (UID: \"df974acb-ce3b-47d1-a5f5-a6f5ad8c95c6\") " pod="openstack/nova-cell1-cell-mapping-5rtsz"
Sep 30 12:39:59 crc kubenswrapper[5002]: I0930 12:39:59.027675 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/df974acb-ce3b-47d1-a5f5-a6f5ad8c95c6-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-5rtsz\" (UID: \"df974acb-ce3b-47d1-a5f5-a6f5ad8c95c6\") " pod="openstack/nova-cell1-cell-mapping-5rtsz"
Sep 30 12:39:59 crc kubenswrapper[5002]: I0930 12:39:59.027706 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b5nqc\" (UniqueName: \"kubernetes.io/projected/df974acb-ce3b-47d1-a5f5-a6f5ad8c95c6-kube-api-access-b5nqc\") pod \"nova-cell1-cell-mapping-5rtsz\" (UID: \"df974acb-ce3b-47d1-a5f5-a6f5ad8c95c6\") " pod="openstack/nova-cell1-cell-mapping-5rtsz"
Sep 30 12:39:59 crc kubenswrapper[5002]: I0930 12:39:59.130217 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b5nqc\" (UniqueName: \"kubernetes.io/projected/df974acb-ce3b-47d1-a5f5-a6f5ad8c95c6-kube-api-access-b5nqc\") pod \"nova-cell1-cell-mapping-5rtsz\" (UID: \"df974acb-ce3b-47d1-a5f5-a6f5ad8c95c6\") " pod="openstack/nova-cell1-cell-mapping-5rtsz"
Sep 30 12:39:59 crc kubenswrapper[5002]: I0930 12:39:59.130354 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/df974acb-ce3b-47d1-a5f5-a6f5ad8c95c6-scripts\") pod \"nova-cell1-cell-mapping-5rtsz\" (UID: \"df974acb-ce3b-47d1-a5f5-a6f5ad8c95c6\") " pod="openstack/nova-cell1-cell-mapping-5rtsz"
Sep 30 12:39:59 crc kubenswrapper[5002]: I0930 12:39:59.130377 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/df974acb-ce3b-47d1-a5f5-a6f5ad8c95c6-config-data\") pod \"nova-cell1-cell-mapping-5rtsz\" (UID: \"df974acb-ce3b-47d1-a5f5-a6f5ad8c95c6\") " pod="openstack/nova-cell1-cell-mapping-5rtsz"
Sep 30 12:39:59 crc kubenswrapper[5002]: I0930 12:39:59.130516 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/df974acb-ce3b-47d1-a5f5-a6f5ad8c95c6-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-5rtsz\" (UID: \"df974acb-ce3b-47d1-a5f5-a6f5ad8c95c6\") " pod="openstack/nova-cell1-cell-mapping-5rtsz"
Sep 30 12:39:59 crc kubenswrapper[5002]: I0930 12:39:59.136001 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/df974acb-ce3b-47d1-a5f5-a6f5ad8c95c6-scripts\") pod \"nova-cell1-cell-mapping-5rtsz\" (UID: \"df974acb-ce3b-47d1-a5f5-a6f5ad8c95c6\") " pod="openstack/nova-cell1-cell-mapping-5rtsz"
Sep 30 12:39:59 crc kubenswrapper[5002]: I0930 12:39:59.136084 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/df974acb-ce3b-47d1-a5f5-a6f5ad8c95c6-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-5rtsz\" (UID: \"df974acb-ce3b-47d1-a5f5-a6f5ad8c95c6\") " pod="openstack/nova-cell1-cell-mapping-5rtsz"
Sep 30 12:39:59 crc kubenswrapper[5002]: I0930 12:39:59.136188 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/df974acb-ce3b-47d1-a5f5-a6f5ad8c95c6-config-data\") pod \"nova-cell1-cell-mapping-5rtsz\" (UID: \"df974acb-ce3b-47d1-a5f5-a6f5ad8c95c6\") " pod="openstack/nova-cell1-cell-mapping-5rtsz"
Sep 30 12:39:59 crc kubenswrapper[5002]: I0930 12:39:59.147935 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b5nqc\" (UniqueName: \"kubernetes.io/projected/df974acb-ce3b-47d1-a5f5-a6f5ad8c95c6-kube-api-access-b5nqc\") pod \"nova-cell1-cell-mapping-5rtsz\" (UID: \"df974acb-ce3b-47d1-a5f5-a6f5ad8c95c6\") " pod="openstack/nova-cell1-cell-mapping-5rtsz"
Sep 30 12:39:59 crc kubenswrapper[5002]: I0930 12:39:59.185428 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-5rtsz"
Sep 30 12:39:59 crc kubenswrapper[5002]: W0930 12:39:59.674647 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poddf974acb_ce3b_47d1_a5f5_a6f5ad8c95c6.slice/crio-3d172e42ef78d22ef1a050c14f8bd266a28081dc41c21bb3151099828c58bc02 WatchSource:0}: Error finding container 3d172e42ef78d22ef1a050c14f8bd266a28081dc41c21bb3151099828c58bc02: Status 404 returned error can't find the container with id 3d172e42ef78d22ef1a050c14f8bd266a28081dc41c21bb3151099828c58bc02
Sep 30 12:39:59 crc kubenswrapper[5002]: I0930 12:39:59.678207 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-5rtsz"]
Sep 30 12:39:59 crc kubenswrapper[5002]: I0930 12:39:59.714210 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"85c8795c-0a51-4d53-a20d-b0b96d217d93","Type":"ContainerStarted","Data":"63d19886190e4585f7ac9724227d2ac39aef0d478573dcbfa9b3c687c7160353"}
Sep 30 12:39:59 crc kubenswrapper[5002]: I0930 12:39:59.716001 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"d0d8b305-6208-4a2d-b131-62d912765c52","Type":"ContainerStarted","Data":"933a2235c968eb0fb4327cca3fc86d331b45c8d8cb639b908e50dd214f689ee6"}
Sep 30 12:39:59 crc kubenswrapper[5002]: I0930 12:39:59.716087 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"d0d8b305-6208-4a2d-b131-62d912765c52","Type":"ContainerStarted","Data":"3c2440447b6a4421dba644b34f39e975f7156ea82a02cce4b3dabc454029a1ad"}
Sep 30 12:39:59 crc kubenswrapper[5002]: I0930 12:39:59.717374 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-5rtsz" event={"ID":"df974acb-ce3b-47d1-a5f5-a6f5ad8c95c6","Type":"ContainerStarted","Data":"3d172e42ef78d22ef1a050c14f8bd266a28081dc41c21bb3151099828c58bc02"}
Sep 30 12:39:59 crc kubenswrapper[5002]: I0930 12:39:59.745711 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.745691568 podStartE2EDuration="2.745691568s" podCreationTimestamp="2025-09-30 12:39:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 12:39:59.735744503 +0000 UTC m=+1173.985426649" watchObservedRunningTime="2025-09-30 12:39:59.745691568 +0000 UTC m=+1173.995373714"
Sep 30 12:40:00 crc kubenswrapper[5002]: I0930 12:40:00.727806 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-5rtsz" event={"ID":"df974acb-ce3b-47d1-a5f5-a6f5ad8c95c6","Type":"ContainerStarted","Data":"d39c28ef837a1a5c0ad1fbcf5c80102a2063ce44028c7f0f673ccbb8313d06b5"}
Sep 30 12:40:00 crc kubenswrapper[5002]: I0930 12:40:00.730133 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"85c8795c-0a51-4d53-a20d-b0b96d217d93","Type":"ContainerStarted","Data":"3635743cab97c5cefa7ed57d32793cdd5cf5d7978017014749bee9038f066e96"}
Sep 30 12:40:00 crc kubenswrapper[5002]: I0930 12:40:00.744550 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-cell-mapping-5rtsz" podStartSLOduration=2.744530424 podStartE2EDuration="2.744530424s" podCreationTimestamp="2025-09-30 12:39:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 12:40:00.743482605 +0000 UTC m=+1174.993164761" watchObservedRunningTime="2025-09-30 12:40:00.744530424 +0000 UTC m=+1174.994212570"
Sep 30 12:40:01 crc kubenswrapper[5002]: I0930 12:40:01.088675 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-59cf4bdb65-sh88w"
Sep 30 12:40:01 crc kubenswrapper[5002]: I0930 12:40:01.164333 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-845d6d6f59-n66v8"]
Sep 30 12:40:01 crc kubenswrapper[5002]: I0930 12:40:01.164624 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-845d6d6f59-n66v8" podUID="9aafee6a-2a42-4a59-abf5-9ac7cdd70aca" containerName="dnsmasq-dns" containerID="cri-o://bbf3996d83b99e8eff6ef68b22b497edb43c2058697c2b76f69ce6ee22cb392d" gracePeriod=10
Sep 30 12:40:01 crc kubenswrapper[5002]: I0930 12:40:01.679981 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-845d6d6f59-n66v8"
Sep 30 12:40:01 crc kubenswrapper[5002]: I0930 12:40:01.743924 5002 generic.go:334] "Generic (PLEG): container finished" podID="9aafee6a-2a42-4a59-abf5-9ac7cdd70aca" containerID="bbf3996d83b99e8eff6ef68b22b497edb43c2058697c2b76f69ce6ee22cb392d" exitCode=0
Sep 30 12:40:01 crc kubenswrapper[5002]: I0930 12:40:01.743977 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-845d6d6f59-n66v8" event={"ID":"9aafee6a-2a42-4a59-abf5-9ac7cdd70aca","Type":"ContainerDied","Data":"bbf3996d83b99e8eff6ef68b22b497edb43c2058697c2b76f69ce6ee22cb392d"}
Sep 30 12:40:01 crc kubenswrapper[5002]: I0930 12:40:01.744003 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-845d6d6f59-n66v8" event={"ID":"9aafee6a-2a42-4a59-abf5-9ac7cdd70aca","Type":"ContainerDied","Data":"baf56aeed60266b14e01708d07e222f7ee859fd743f8b73fa267c542cbfb0d23"}
Sep 30 12:40:01 crc kubenswrapper[5002]: I0930 12:40:01.744019 5002 scope.go:117] "RemoveContainer" containerID="bbf3996d83b99e8eff6ef68b22b497edb43c2058697c2b76f69ce6ee22cb392d"
Sep 30 12:40:01 crc kubenswrapper[5002]: I0930 12:40:01.744133 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-845d6d6f59-n66v8"
Sep 30 12:40:01 crc kubenswrapper[5002]: I0930 12:40:01.757561 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"85c8795c-0a51-4d53-a20d-b0b96d217d93","Type":"ContainerStarted","Data":"6f40fc3302e70260c4d3df15dbc0029cecd17204abb03b33a2fa41fbac1c9649"}
Sep 30 12:40:01 crc kubenswrapper[5002]: I0930 12:40:01.757729 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0"
Sep 30 12:40:01 crc kubenswrapper[5002]: I0930 12:40:01.772154 5002 scope.go:117] "RemoveContainer" containerID="9ed648102b4434d916a0bf898ff00dd41841530a9b308463a228c82210ebcc2d"
Sep 30 12:40:01 crc kubenswrapper[5002]: I0930 12:40:01.782699 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9aafee6a-2a42-4a59-abf5-9ac7cdd70aca-ovsdbserver-sb\") pod \"9aafee6a-2a42-4a59-abf5-9ac7cdd70aca\" (UID: \"9aafee6a-2a42-4a59-abf5-9ac7cdd70aca\") "
Sep 30 12:40:01 crc kubenswrapper[5002]: I0930 12:40:01.782784 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9aafee6a-2a42-4a59-abf5-9ac7cdd70aca-ovsdbserver-nb\") pod \"9aafee6a-2a42-4a59-abf5-9ac7cdd70aca\" (UID: \"9aafee6a-2a42-4a59-abf5-9ac7cdd70aca\") "
Sep 30 12:40:01 crc kubenswrapper[5002]: I0930 12:40:01.782873 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/9aafee6a-2a42-4a59-abf5-9ac7cdd70aca-dns-swift-storage-0\") pod \"9aafee6a-2a42-4a59-abf5-9ac7cdd70aca\" (UID: \"9aafee6a-2a42-4a59-abf5-9ac7cdd70aca\") "
Sep 30 12:40:01 crc kubenswrapper[5002]: I0930 12:40:01.783014 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9aafee6a-2a42-4a59-abf5-9ac7cdd70aca-dns-svc\") pod \"9aafee6a-2a42-4a59-abf5-9ac7cdd70aca\" (UID: \"9aafee6a-2a42-4a59-abf5-9ac7cdd70aca\") "
Sep 30 12:40:01 crc kubenswrapper[5002]: I0930 12:40:01.783842 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6k7rn\" (UniqueName: \"kubernetes.io/projected/9aafee6a-2a42-4a59-abf5-9ac7cdd70aca-kube-api-access-6k7rn\") pod \"9aafee6a-2a42-4a59-abf5-9ac7cdd70aca\" (UID: \"9aafee6a-2a42-4a59-abf5-9ac7cdd70aca\") "
Sep 30 12:40:01 crc kubenswrapper[5002]: I0930 12:40:01.783909 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9aafee6a-2a42-4a59-abf5-9ac7cdd70aca-config\") pod \"9aafee6a-2a42-4a59-abf5-9ac7cdd70aca\" (UID: \"9aafee6a-2a42-4a59-abf5-9ac7cdd70aca\") "
Sep 30 12:40:01 crc kubenswrapper[5002]: I0930 12:40:01.784440 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.11830392 podStartE2EDuration="5.784423342s" podCreationTimestamp="2025-09-30 12:39:56 +0000 UTC" firstStartedPulling="2025-09-30 12:39:57.54382493 +0000 UTC m=+1171.793507076" lastFinishedPulling="2025-09-30 12:40:01.209944352 +0000 UTC m=+1175.459626498" observedRunningTime="2025-09-30 12:40:01.782111279 +0000 UTC m=+1176.031793445" watchObservedRunningTime="2025-09-30 12:40:01.784423342 +0000 UTC m=+1176.034105488"
Sep 30 12:40:01 crc kubenswrapper[5002]: I0930 12:40:01.807192 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9aafee6a-2a42-4a59-abf5-9ac7cdd70aca-kube-api-access-6k7rn" (OuterVolumeSpecName: "kube-api-access-6k7rn") pod "9aafee6a-2a42-4a59-abf5-9ac7cdd70aca" (UID: "9aafee6a-2a42-4a59-abf5-9ac7cdd70aca"). InnerVolumeSpecName "kube-api-access-6k7rn". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 12:40:01 crc kubenswrapper[5002]: I0930 12:40:01.809710 5002 scope.go:117] "RemoveContainer" containerID="bbf3996d83b99e8eff6ef68b22b497edb43c2058697c2b76f69ce6ee22cb392d"
Sep 30 12:40:01 crc kubenswrapper[5002]: E0930 12:40:01.810262 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bbf3996d83b99e8eff6ef68b22b497edb43c2058697c2b76f69ce6ee22cb392d\": container with ID starting with bbf3996d83b99e8eff6ef68b22b497edb43c2058697c2b76f69ce6ee22cb392d not found: ID does not exist" containerID="bbf3996d83b99e8eff6ef68b22b497edb43c2058697c2b76f69ce6ee22cb392d"
Sep 30 12:40:01 crc kubenswrapper[5002]: I0930 12:40:01.810304 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bbf3996d83b99e8eff6ef68b22b497edb43c2058697c2b76f69ce6ee22cb392d"} err="failed to get container status \"bbf3996d83b99e8eff6ef68b22b497edb43c2058697c2b76f69ce6ee22cb392d\": rpc error: code = NotFound desc = could not find container \"bbf3996d83b99e8eff6ef68b22b497edb43c2058697c2b76f69ce6ee22cb392d\": container with ID starting with bbf3996d83b99e8eff6ef68b22b497edb43c2058697c2b76f69ce6ee22cb392d not found: ID does not exist"
Sep 30 12:40:01 crc kubenswrapper[5002]: I0930 12:40:01.810329 5002 scope.go:117] "RemoveContainer" containerID="9ed648102b4434d916a0bf898ff00dd41841530a9b308463a228c82210ebcc2d"
Sep 30 12:40:01 crc kubenswrapper[5002]: E0930 12:40:01.810691 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9ed648102b4434d916a0bf898ff00dd41841530a9b308463a228c82210ebcc2d\": container with ID starting with 9ed648102b4434d916a0bf898ff00dd41841530a9b308463a228c82210ebcc2d not found: ID does not exist" containerID="9ed648102b4434d916a0bf898ff00dd41841530a9b308463a228c82210ebcc2d"
Sep 30 12:40:01 crc kubenswrapper[5002]: I0930 12:40:01.810723 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9ed648102b4434d916a0bf898ff00dd41841530a9b308463a228c82210ebcc2d"} err="failed to get container status \"9ed648102b4434d916a0bf898ff00dd41841530a9b308463a228c82210ebcc2d\": rpc error: code = NotFound desc = could not find container \"9ed648102b4434d916a0bf898ff00dd41841530a9b308463a228c82210ebcc2d\": container with ID starting with 9ed648102b4434d916a0bf898ff00dd41841530a9b308463a228c82210ebcc2d not found: ID does not exist"
Sep 30 12:40:01 crc kubenswrapper[5002]: I0930 12:40:01.865205 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9aafee6a-2a42-4a59-abf5-9ac7cdd70aca-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "9aafee6a-2a42-4a59-abf5-9ac7cdd70aca" (UID: "9aafee6a-2a42-4a59-abf5-9ac7cdd70aca"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 30 12:40:01 crc kubenswrapper[5002]: I0930 12:40:01.870320 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9aafee6a-2a42-4a59-abf5-9ac7cdd70aca-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "9aafee6a-2a42-4a59-abf5-9ac7cdd70aca" (UID: "9aafee6a-2a42-4a59-abf5-9ac7cdd70aca"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 30 12:40:01 crc kubenswrapper[5002]: I0930 12:40:01.873228 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9aafee6a-2a42-4a59-abf5-9ac7cdd70aca-config" (OuterVolumeSpecName: "config") pod "9aafee6a-2a42-4a59-abf5-9ac7cdd70aca" (UID: "9aafee6a-2a42-4a59-abf5-9ac7cdd70aca"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 30 12:40:01 crc kubenswrapper[5002]: I0930 12:40:01.881141 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9aafee6a-2a42-4a59-abf5-9ac7cdd70aca-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "9aafee6a-2a42-4a59-abf5-9ac7cdd70aca" (UID: "9aafee6a-2a42-4a59-abf5-9ac7cdd70aca"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 30 12:40:01 crc kubenswrapper[5002]: I0930 12:40:01.884410 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9aafee6a-2a42-4a59-abf5-9ac7cdd70aca-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "9aafee6a-2a42-4a59-abf5-9ac7cdd70aca" (UID: "9aafee6a-2a42-4a59-abf5-9ac7cdd70aca"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 30 12:40:01 crc kubenswrapper[5002]: I0930 12:40:01.885928 5002 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9aafee6a-2a42-4a59-abf5-9ac7cdd70aca-dns-svc\") on node \"crc\" DevicePath \"\""
Sep 30 12:40:01 crc kubenswrapper[5002]: I0930 12:40:01.886028 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6k7rn\" (UniqueName: \"kubernetes.io/projected/9aafee6a-2a42-4a59-abf5-9ac7cdd70aca-kube-api-access-6k7rn\") on node \"crc\" DevicePath \"\""
Sep 30 12:40:01 crc kubenswrapper[5002]: I0930 12:40:01.886303 5002 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9aafee6a-2a42-4a59-abf5-9ac7cdd70aca-config\") on node \"crc\" DevicePath \"\""
Sep 30 12:40:01 crc kubenswrapper[5002]: I0930 12:40:01.886412 5002 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9aafee6a-2a42-4a59-abf5-9ac7cdd70aca-ovsdbserver-sb\") on node \"crc\" DevicePath \"\""
Sep 30 12:40:01 crc kubenswrapper[5002]: I0930 12:40:01.886596 5002 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9aafee6a-2a42-4a59-abf5-9ac7cdd70aca-ovsdbserver-nb\") on node \"crc\" DevicePath \"\""
Sep 30 12:40:01 crc kubenswrapper[5002]: I0930 12:40:01.886676 5002 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/9aafee6a-2a42-4a59-abf5-9ac7cdd70aca-dns-swift-storage-0\") on node \"crc\" DevicePath \"\""
Sep 30 12:40:02 crc kubenswrapper[5002]: I0930 12:40:02.079189 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-845d6d6f59-n66v8"]
Sep 30 12:40:02 crc kubenswrapper[5002]: I0930 12:40:02.086294 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-845d6d6f59-n66v8"]
Sep 30 12:40:02 crc kubenswrapper[5002]: I0930 12:40:02.695423 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9aafee6a-2a42-4a59-abf5-9ac7cdd70aca" path="/var/lib/kubelet/pods/9aafee6a-2a42-4a59-abf5-9ac7cdd70aca/volumes"
Sep 30 12:40:04 crc kubenswrapper[5002]: I0930 12:40:04.795640 5002 generic.go:334] "Generic (PLEG): container finished" podID="df974acb-ce3b-47d1-a5f5-a6f5ad8c95c6" containerID="d39c28ef837a1a5c0ad1fbcf5c80102a2063ce44028c7f0f673ccbb8313d06b5" exitCode=0
Sep 30 12:40:04 crc kubenswrapper[5002]: I0930 12:40:04.795701 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-5rtsz" event={"ID":"df974acb-ce3b-47d1-a5f5-a6f5ad8c95c6","Type":"ContainerDied","Data":"d39c28ef837a1a5c0ad1fbcf5c80102a2063ce44028c7f0f673ccbb8313d06b5"}
Sep 30 12:40:06 crc kubenswrapper[5002]: I0930 12:40:06.197993 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-5rtsz"
Sep 30 12:40:06 crc kubenswrapper[5002]: I0930 12:40:06.291033 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/df974acb-ce3b-47d1-a5f5-a6f5ad8c95c6-scripts\") pod \"df974acb-ce3b-47d1-a5f5-a6f5ad8c95c6\" (UID: \"df974acb-ce3b-47d1-a5f5-a6f5ad8c95c6\") "
Sep 30 12:40:06 crc kubenswrapper[5002]: I0930 12:40:06.291227 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-b5nqc\" (UniqueName: \"kubernetes.io/projected/df974acb-ce3b-47d1-a5f5-a6f5ad8c95c6-kube-api-access-b5nqc\") pod \"df974acb-ce3b-47d1-a5f5-a6f5ad8c95c6\" (UID: \"df974acb-ce3b-47d1-a5f5-a6f5ad8c95c6\") "
Sep 30 12:40:06 crc kubenswrapper[5002]: I0930 12:40:06.291338 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/df974acb-ce3b-47d1-a5f5-a6f5ad8c95c6-combined-ca-bundle\") pod \"df974acb-ce3b-47d1-a5f5-a6f5ad8c95c6\" (UID: \"df974acb-ce3b-47d1-a5f5-a6f5ad8c95c6\") "
Sep 30 12:40:06 crc kubenswrapper[5002]: I0930 12:40:06.291415 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/df974acb-ce3b-47d1-a5f5-a6f5ad8c95c6-config-data\") pod \"df974acb-ce3b-47d1-a5f5-a6f5ad8c95c6\" (UID: \"df974acb-ce3b-47d1-a5f5-a6f5ad8c95c6\") "
Sep 30 12:40:06 crc kubenswrapper[5002]: I0930 12:40:06.297646 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/df974acb-ce3b-47d1-a5f5-a6f5ad8c95c6-scripts" (OuterVolumeSpecName: "scripts") pod "df974acb-ce3b-47d1-a5f5-a6f5ad8c95c6" (UID: "df974acb-ce3b-47d1-a5f5-a6f5ad8c95c6"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 12:40:06 crc kubenswrapper[5002]: I0930 12:40:06.298721 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/df974acb-ce3b-47d1-a5f5-a6f5ad8c95c6-kube-api-access-b5nqc" (OuterVolumeSpecName: "kube-api-access-b5nqc") pod "df974acb-ce3b-47d1-a5f5-a6f5ad8c95c6" (UID: "df974acb-ce3b-47d1-a5f5-a6f5ad8c95c6"). InnerVolumeSpecName "kube-api-access-b5nqc". PluginName "kubernetes.io/projected", VolumeGidValue ""
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 12:40:06 crc kubenswrapper[5002]: I0930 12:40:06.328361 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/df974acb-ce3b-47d1-a5f5-a6f5ad8c95c6-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "df974acb-ce3b-47d1-a5f5-a6f5ad8c95c6" (UID: "df974acb-ce3b-47d1-a5f5-a6f5ad8c95c6"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:40:06 crc kubenswrapper[5002]: I0930 12:40:06.342379 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/df974acb-ce3b-47d1-a5f5-a6f5ad8c95c6-config-data" (OuterVolumeSpecName: "config-data") pod "df974acb-ce3b-47d1-a5f5-a6f5ad8c95c6" (UID: "df974acb-ce3b-47d1-a5f5-a6f5ad8c95c6"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:40:06 crc kubenswrapper[5002]: I0930 12:40:06.393458 5002 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/df974acb-ce3b-47d1-a5f5-a6f5ad8c95c6-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 12:40:06 crc kubenswrapper[5002]: I0930 12:40:06.393509 5002 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/df974acb-ce3b-47d1-a5f5-a6f5ad8c95c6-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 12:40:06 crc kubenswrapper[5002]: I0930 12:40:06.393521 5002 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/df974acb-ce3b-47d1-a5f5-a6f5ad8c95c6-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 12:40:06 crc kubenswrapper[5002]: I0930 12:40:06.393532 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-b5nqc\" (UniqueName: \"kubernetes.io/projected/df974acb-ce3b-47d1-a5f5-a6f5ad8c95c6-kube-api-access-b5nqc\") on node \"crc\" DevicePath \"\"" Sep 30 12:40:06 crc kubenswrapper[5002]: I0930 12:40:06.816443 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-5rtsz" event={"ID":"df974acb-ce3b-47d1-a5f5-a6f5ad8c95c6","Type":"ContainerDied","Data":"3d172e42ef78d22ef1a050c14f8bd266a28081dc41c21bb3151099828c58bc02"} Sep 30 12:40:06 crc kubenswrapper[5002]: I0930 12:40:06.816498 5002 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3d172e42ef78d22ef1a050c14f8bd266a28081dc41c21bb3151099828c58bc02" Sep 30 12:40:06 crc kubenswrapper[5002]: I0930 12:40:06.816526 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-5rtsz" Sep 30 12:40:07 crc kubenswrapper[5002]: I0930 12:40:07.006555 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Sep 30 12:40:07 crc kubenswrapper[5002]: I0930 12:40:07.007146 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="d0d8b305-6208-4a2d-b131-62d912765c52" containerName="nova-api-log" containerID="cri-o://3c2440447b6a4421dba644b34f39e975f7156ea82a02cce4b3dabc454029a1ad" gracePeriod=30 Sep 30 12:40:07 crc kubenswrapper[5002]: I0930 12:40:07.007264 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="d0d8b305-6208-4a2d-b131-62d912765c52" containerName="nova-api-api" containerID="cri-o://933a2235c968eb0fb4327cca3fc86d331b45c8d8cb639b908e50dd214f689ee6" gracePeriod=30 Sep 30 12:40:07 crc kubenswrapper[5002]: I0930 12:40:07.021460 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Sep 30 12:40:07 crc kubenswrapper[5002]: I0930 12:40:07.021883 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="41c5acb5-451a-46fe-8384-be7514e49ea9" containerName="nova-scheduler-scheduler" containerID="cri-o://ea585a3f09929f61b29d55fd2e68fe5bdb058d85d58893f6c8db15ac0460e776" gracePeriod=30 Sep 30 12:40:07 crc kubenswrapper[5002]: I0930 12:40:07.036695 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Sep 30 12:40:07 crc kubenswrapper[5002]: I0930 12:40:07.037150 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="b23ec22e-90c8-41b7-bdc8-4861c7ec9c83" containerName="nova-metadata-log" containerID="cri-o://8522f43ae6027cb6e0fbd468460668db4682d9c645ae511d039a53059ab12116" gracePeriod=30 Sep 30 12:40:07 crc kubenswrapper[5002]: I0930 12:40:07.037281 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="b23ec22e-90c8-41b7-bdc8-4861c7ec9c83" containerName="nova-metadata-metadata" containerID="cri-o://07a85959d4119add40b7fa627c82be82afc4eca869d90b2ae9adee1e50c283ef" gracePeriod=30 Sep 30 12:40:07 crc kubenswrapper[5002]: I0930 12:40:07.639923 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Sep 30 12:40:07 crc kubenswrapper[5002]: I0930 12:40:07.728403 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d0d8b305-6208-4a2d-b131-62d912765c52-logs\") pod \"d0d8b305-6208-4a2d-b131-62d912765c52\" (UID: \"d0d8b305-6208-4a2d-b131-62d912765c52\") " Sep 30 12:40:07 crc kubenswrapper[5002]: I0930 12:40:07.728534 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d0d8b305-6208-4a2d-b131-62d912765c52-config-data\") pod \"d0d8b305-6208-4a2d-b131-62d912765c52\" (UID: \"d0d8b305-6208-4a2d-b131-62d912765c52\") " Sep 30 12:40:07 crc kubenswrapper[5002]: I0930 12:40:07.728638 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d0d8b305-6208-4a2d-b131-62d912765c52-combined-ca-bundle\") pod \"d0d8b305-6208-4a2d-b131-62d912765c52\" (UID: \"d0d8b305-6208-4a2d-b131-62d912765c52\") " Sep 30 12:40:07 crc kubenswrapper[5002]: I0930 12:40:07.728683 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vzrc4\" (UniqueName: \"kubernetes.io/projected/d0d8b305-6208-4a2d-b131-62d912765c52-kube-api-access-vzrc4\") pod \"d0d8b305-6208-4a2d-b131-62d912765c52\" (UID: \"d0d8b305-6208-4a2d-b131-62d912765c52\") " Sep 30 12:40:07 crc kubenswrapper[5002]: I0930 12:40:07.728711 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/d0d8b305-6208-4a2d-b131-62d912765c52-public-tls-certs\") pod \"d0d8b305-6208-4a2d-b131-62d912765c52\" (UID: \"d0d8b305-6208-4a2d-b131-62d912765c52\") " Sep 30 12:40:07 crc kubenswrapper[5002]: I0930 12:40:07.728730 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/d0d8b305-6208-4a2d-b131-62d912765c52-internal-tls-certs\") pod \"d0d8b305-6208-4a2d-b131-62d912765c52\" (UID: \"d0d8b305-6208-4a2d-b131-62d912765c52\") " Sep 30 12:40:07 crc kubenswrapper[5002]: I0930 12:40:07.730943 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d0d8b305-6208-4a2d-b131-62d912765c52-logs" (OuterVolumeSpecName: "logs") pod "d0d8b305-6208-4a2d-b131-62d912765c52" (UID: "d0d8b305-6208-4a2d-b131-62d912765c52"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 12:40:07 crc kubenswrapper[5002]: I0930 12:40:07.735674 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d0d8b305-6208-4a2d-b131-62d912765c52-kube-api-access-vzrc4" (OuterVolumeSpecName: "kube-api-access-vzrc4") pod "d0d8b305-6208-4a2d-b131-62d912765c52" (UID: "d0d8b305-6208-4a2d-b131-62d912765c52"). InnerVolumeSpecName "kube-api-access-vzrc4". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 12:40:07 crc kubenswrapper[5002]: I0930 12:40:07.757170 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d0d8b305-6208-4a2d-b131-62d912765c52-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d0d8b305-6208-4a2d-b131-62d912765c52" (UID: "d0d8b305-6208-4a2d-b131-62d912765c52"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:40:07 crc kubenswrapper[5002]: I0930 12:40:07.763657 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d0d8b305-6208-4a2d-b131-62d912765c52-config-data" (OuterVolumeSpecName: "config-data") pod "d0d8b305-6208-4a2d-b131-62d912765c52" (UID: "d0d8b305-6208-4a2d-b131-62d912765c52"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:40:07 crc kubenswrapper[5002]: I0930 12:40:07.789927 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d0d8b305-6208-4a2d-b131-62d912765c52-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "d0d8b305-6208-4a2d-b131-62d912765c52" (UID: "d0d8b305-6208-4a2d-b131-62d912765c52"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:40:07 crc kubenswrapper[5002]: I0930 12:40:07.797712 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d0d8b305-6208-4a2d-b131-62d912765c52-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "d0d8b305-6208-4a2d-b131-62d912765c52" (UID: "d0d8b305-6208-4a2d-b131-62d912765c52"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:40:07 crc kubenswrapper[5002]: I0930 12:40:07.825804 5002 generic.go:334] "Generic (PLEG): container finished" podID="d0d8b305-6208-4a2d-b131-62d912765c52" containerID="933a2235c968eb0fb4327cca3fc86d331b45c8d8cb639b908e50dd214f689ee6" exitCode=0 Sep 30 12:40:07 crc kubenswrapper[5002]: I0930 12:40:07.825838 5002 generic.go:334] "Generic (PLEG): container finished" podID="d0d8b305-6208-4a2d-b131-62d912765c52" containerID="3c2440447b6a4421dba644b34f39e975f7156ea82a02cce4b3dabc454029a1ad" exitCode=143 Sep 30 12:40:07 crc kubenswrapper[5002]: I0930 12:40:07.825891 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Sep 30 12:40:07 crc kubenswrapper[5002]: I0930 12:40:07.825898 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"d0d8b305-6208-4a2d-b131-62d912765c52","Type":"ContainerDied","Data":"933a2235c968eb0fb4327cca3fc86d331b45c8d8cb639b908e50dd214f689ee6"} Sep 30 12:40:07 crc kubenswrapper[5002]: I0930 12:40:07.826019 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"d0d8b305-6208-4a2d-b131-62d912765c52","Type":"ContainerDied","Data":"3c2440447b6a4421dba644b34f39e975f7156ea82a02cce4b3dabc454029a1ad"} Sep 30 12:40:07 crc kubenswrapper[5002]: I0930 12:40:07.826038 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"d0d8b305-6208-4a2d-b131-62d912765c52","Type":"ContainerDied","Data":"813fd2f00dc0f680873425a9d91c4a1550cf78cb071acbb78df63d9757aa4a8e"} Sep 30 12:40:07 crc kubenswrapper[5002]: I0930 12:40:07.826058 5002 scope.go:117] "RemoveContainer" containerID="933a2235c968eb0fb4327cca3fc86d331b45c8d8cb639b908e50dd214f689ee6" Sep 30 12:40:07 crc kubenswrapper[5002]: I0930 12:40:07.828547 5002 generic.go:334] "Generic (PLEG): container finished" podID="b23ec22e-90c8-41b7-bdc8-4861c7ec9c83" containerID="8522f43ae6027cb6e0fbd468460668db4682d9c645ae511d039a53059ab12116" exitCode=143 Sep 30 12:40:07 crc kubenswrapper[5002]: I0930 12:40:07.828592 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"b23ec22e-90c8-41b7-bdc8-4861c7ec9c83","Type":"ContainerDied","Data":"8522f43ae6027cb6e0fbd468460668db4682d9c645ae511d039a53059ab12116"} Sep 30 12:40:07 crc kubenswrapper[5002]: I0930 12:40:07.830485 5002 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d0d8b305-6208-4a2d-b131-62d912765c52-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 12:40:07 crc kubenswrapper[5002]: I0930 12:40:07.830510 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vzrc4\" (UniqueName: \"kubernetes.io/projected/d0d8b305-6208-4a2d-b131-62d912765c52-kube-api-access-vzrc4\") on node \"crc\" DevicePath \"\"" Sep 30 12:40:07 crc kubenswrapper[5002]: I0930 12:40:07.830522 5002 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/d0d8b305-6208-4a2d-b131-62d912765c52-public-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 30 12:40:07 crc kubenswrapper[5002]: I0930 12:40:07.830531 5002 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/d0d8b305-6208-4a2d-b131-62d912765c52-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 30 12:40:07 crc kubenswrapper[5002]: I0930 12:40:07.830539 5002 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d0d8b305-6208-4a2d-b131-62d912765c52-logs\") on node \"crc\" DevicePath \"\"" Sep 30 12:40:07 crc kubenswrapper[5002]: I0930 12:40:07.830548 5002 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d0d8b305-6208-4a2d-b131-62d912765c52-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 12:40:07 crc kubenswrapper[5002]: I0930 12:40:07.849818 5002 scope.go:117] "RemoveContainer" containerID="3c2440447b6a4421dba644b34f39e975f7156ea82a02cce4b3dabc454029a1ad" Sep 30 12:40:07 crc kubenswrapper[5002]: I0930 12:40:07.857633 5002 kubelet.go:2437] "SyncLoop 
DELETE" source="api" pods=["openstack/nova-api-0"] Sep 30 12:40:07 crc kubenswrapper[5002]: I0930 12:40:07.864809 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Sep 30 12:40:07 crc kubenswrapper[5002]: I0930 12:40:07.874645 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Sep 30 12:40:07 crc kubenswrapper[5002]: E0930 12:40:07.875000 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="df974acb-ce3b-47d1-a5f5-a6f5ad8c95c6" containerName="nova-manage" Sep 30 12:40:07 crc kubenswrapper[5002]: I0930 12:40:07.875015 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="df974acb-ce3b-47d1-a5f5-a6f5ad8c95c6" containerName="nova-manage" Sep 30 12:40:07 crc kubenswrapper[5002]: E0930 12:40:07.875028 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9aafee6a-2a42-4a59-abf5-9ac7cdd70aca" containerName="dnsmasq-dns" Sep 30 12:40:07 crc kubenswrapper[5002]: I0930 12:40:07.875033 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="9aafee6a-2a42-4a59-abf5-9ac7cdd70aca" containerName="dnsmasq-dns" Sep 30 12:40:07 crc kubenswrapper[5002]: E0930 12:40:07.875062 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d0d8b305-6208-4a2d-b131-62d912765c52" containerName="nova-api-log" Sep 30 12:40:07 crc kubenswrapper[5002]: I0930 12:40:07.875067 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="d0d8b305-6208-4a2d-b131-62d912765c52" containerName="nova-api-log" Sep 30 12:40:07 crc kubenswrapper[5002]: E0930 12:40:07.875078 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d0d8b305-6208-4a2d-b131-62d912765c52" containerName="nova-api-api" Sep 30 12:40:07 crc kubenswrapper[5002]: I0930 12:40:07.875084 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="d0d8b305-6208-4a2d-b131-62d912765c52" containerName="nova-api-api" Sep 30 12:40:07 crc kubenswrapper[5002]: E0930 12:40:07.875094 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9aafee6a-2a42-4a59-abf5-9ac7cdd70aca" containerName="init" Sep 30 12:40:07 crc kubenswrapper[5002]: I0930 12:40:07.875100 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="9aafee6a-2a42-4a59-abf5-9ac7cdd70aca" containerName="init" Sep 30 12:40:07 crc kubenswrapper[5002]: I0930 12:40:07.875249 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="df974acb-ce3b-47d1-a5f5-a6f5ad8c95c6" containerName="nova-manage" Sep 30 12:40:07 crc kubenswrapper[5002]: I0930 12:40:07.875265 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="9aafee6a-2a42-4a59-abf5-9ac7cdd70aca" containerName="dnsmasq-dns" Sep 30 12:40:07 crc kubenswrapper[5002]: I0930 12:40:07.875281 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="d0d8b305-6208-4a2d-b131-62d912765c52" containerName="nova-api-api" Sep 30 12:40:07 crc kubenswrapper[5002]: I0930 12:40:07.875301 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="d0d8b305-6208-4a2d-b131-62d912765c52" containerName="nova-api-log" Sep 30 12:40:07 crc kubenswrapper[5002]: I0930 12:40:07.879098 5002 scope.go:117] "RemoveContainer" containerID="933a2235c968eb0fb4327cca3fc86d331b45c8d8cb639b908e50dd214f689ee6" Sep 30 12:40:07 crc kubenswrapper[5002]: I0930 12:40:07.879387 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Sep 30 12:40:07 crc kubenswrapper[5002]: E0930 12:40:07.879808 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"933a2235c968eb0fb4327cca3fc86d331b45c8d8cb639b908e50dd214f689ee6\": container with ID starting with 933a2235c968eb0fb4327cca3fc86d331b45c8d8cb639b908e50dd214f689ee6 not found: ID does not exist" containerID="933a2235c968eb0fb4327cca3fc86d331b45c8d8cb639b908e50dd214f689ee6" Sep 30 12:40:07 crc kubenswrapper[5002]: I0930 12:40:07.879864 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"933a2235c968eb0fb4327cca3fc86d331b45c8d8cb639b908e50dd214f689ee6"} err="failed to get container status \"933a2235c968eb0fb4327cca3fc86d331b45c8d8cb639b908e50dd214f689ee6\": rpc error: code = NotFound desc = could not find container \"933a2235c968eb0fb4327cca3fc86d331b45c8d8cb639b908e50dd214f689ee6\": container with ID starting with 933a2235c968eb0fb4327cca3fc86d331b45c8d8cb639b908e50dd214f689ee6 not found: ID does not exist" Sep 30 12:40:07 crc kubenswrapper[5002]: I0930 12:40:07.879899 5002 scope.go:117] "RemoveContainer" containerID="3c2440447b6a4421dba644b34f39e975f7156ea82a02cce4b3dabc454029a1ad" Sep 30 12:40:07 crc kubenswrapper[5002]: E0930 12:40:07.880354 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3c2440447b6a4421dba644b34f39e975f7156ea82a02cce4b3dabc454029a1ad\": container with ID starting with 3c2440447b6a4421dba644b34f39e975f7156ea82a02cce4b3dabc454029a1ad not found: ID does not exist" containerID="3c2440447b6a4421dba644b34f39e975f7156ea82a02cce4b3dabc454029a1ad" Sep 30 12:40:07 crc kubenswrapper[5002]: I0930 12:40:07.880397 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3c2440447b6a4421dba644b34f39e975f7156ea82a02cce4b3dabc454029a1ad"} err="failed to get container status \"3c2440447b6a4421dba644b34f39e975f7156ea82a02cce4b3dabc454029a1ad\": rpc error: code = NotFound desc = could not find container \"3c2440447b6a4421dba644b34f39e975f7156ea82a02cce4b3dabc454029a1ad\": container with ID starting with 3c2440447b6a4421dba644b34f39e975f7156ea82a02cce4b3dabc454029a1ad not found: ID does not exist" Sep 30 12:40:07 crc kubenswrapper[5002]: I0930 12:40:07.880431 5002 scope.go:117] "RemoveContainer" containerID="933a2235c968eb0fb4327cca3fc86d331b45c8d8cb639b908e50dd214f689ee6" Sep 30 12:40:07 crc kubenswrapper[5002]: I0930 12:40:07.881069 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"933a2235c968eb0fb4327cca3fc86d331b45c8d8cb639b908e50dd214f689ee6"} err="failed to get container status \"933a2235c968eb0fb4327cca3fc86d331b45c8d8cb639b908e50dd214f689ee6\": rpc error: code = NotFound desc = could not find container \"933a2235c968eb0fb4327cca3fc86d331b45c8d8cb639b908e50dd214f689ee6\": container with ID starting with 933a2235c968eb0fb4327cca3fc86d331b45c8d8cb639b908e50dd214f689ee6 not found: ID does not exist" Sep 30 12:40:07 crc kubenswrapper[5002]: I0930 12:40:07.881101 5002 scope.go:117] "RemoveContainer" containerID="3c2440447b6a4421dba644b34f39e975f7156ea82a02cce4b3dabc454029a1ad" Sep 30 12:40:07 crc kubenswrapper[5002]: I0930 12:40:07.881143 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-internal-svc" Sep 30 12:40:07 crc kubenswrapper[5002]: I0930 12:40:07.881285 5002 reflector.go:368] 
Sep 30 12:40:07 crc kubenswrapper[5002]: I0930 12:40:07.881546 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data"
Sep 30 12:40:07 crc kubenswrapper[5002]: I0930 12:40:07.882059 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3c2440447b6a4421dba644b34f39e975f7156ea82a02cce4b3dabc454029a1ad"} err="failed to get container status \"3c2440447b6a4421dba644b34f39e975f7156ea82a02cce4b3dabc454029a1ad\": rpc error: code = NotFound desc = could not find container \"3c2440447b6a4421dba644b34f39e975f7156ea82a02cce4b3dabc454029a1ad\": container with ID starting with 3c2440447b6a4421dba644b34f39e975f7156ea82a02cce4b3dabc454029a1ad not found: ID does not exist"
Sep 30 12:40:07 crc kubenswrapper[5002]: I0930 12:40:07.893829 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"]
Sep 30 12:40:07 crc kubenswrapper[5002]: I0930 12:40:07.932164 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/822ff6cf-9987-40ed-929e-615b255cc532-internal-tls-certs\") pod \"nova-api-0\" (UID: \"822ff6cf-9987-40ed-929e-615b255cc532\") " pod="openstack/nova-api-0"
Sep 30 12:40:07 crc kubenswrapper[5002]: I0930 12:40:07.932304 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/822ff6cf-9987-40ed-929e-615b255cc532-public-tls-certs\") pod \"nova-api-0\" (UID: \"822ff6cf-9987-40ed-929e-615b255cc532\") " pod="openstack/nova-api-0"
Sep 30 12:40:07 crc kubenswrapper[5002]: I0930 12:40:07.932371 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/822ff6cf-9987-40ed-929e-615b255cc532-config-data\") pod \"nova-api-0\" (UID: \"822ff6cf-9987-40ed-929e-615b255cc532\") " pod="openstack/nova-api-0"
Sep 30 12:40:07 crc kubenswrapper[5002]: I0930 12:40:07.932430 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mzw79\" (UniqueName: \"kubernetes.io/projected/822ff6cf-9987-40ed-929e-615b255cc532-kube-api-access-mzw79\") pod \"nova-api-0\" (UID: \"822ff6cf-9987-40ed-929e-615b255cc532\") " pod="openstack/nova-api-0"
Sep 30 12:40:07 crc kubenswrapper[5002]: I0930 12:40:07.932447 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/822ff6cf-9987-40ed-929e-615b255cc532-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"822ff6cf-9987-40ed-929e-615b255cc532\") " pod="openstack/nova-api-0"
Sep 30 12:40:07 crc kubenswrapper[5002]: I0930 12:40:07.932505 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/822ff6cf-9987-40ed-929e-615b255cc532-logs\") pod \"nova-api-0\" (UID: \"822ff6cf-9987-40ed-929e-615b255cc532\") " pod="openstack/nova-api-0"
Sep 30 12:40:08 crc kubenswrapper[5002]: I0930 12:40:08.034229 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/822ff6cf-9987-40ed-929e-615b255cc532-internal-tls-certs\") pod \"nova-api-0\" (UID: \"822ff6cf-9987-40ed-929e-615b255cc532\") " pod="openstack/nova-api-0"
Sep 30 12:40:08 crc kubenswrapper[5002]: I0930 12:40:08.034320 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/822ff6cf-9987-40ed-929e-615b255cc532-public-tls-certs\") pod \"nova-api-0\" (UID: \"822ff6cf-9987-40ed-929e-615b255cc532\") " pod="openstack/nova-api-0"
Sep 30 12:40:08 crc kubenswrapper[5002]: I0930 12:40:08.034380 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/822ff6cf-9987-40ed-929e-615b255cc532-config-data\") pod \"nova-api-0\" (UID: \"822ff6cf-9987-40ed-929e-615b255cc532\") " pod="openstack/nova-api-0"
Sep 30 12:40:08 crc kubenswrapper[5002]: I0930 12:40:08.034406 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mzw79\" (UniqueName: \"kubernetes.io/projected/822ff6cf-9987-40ed-929e-615b255cc532-kube-api-access-mzw79\") pod \"nova-api-0\" (UID: \"822ff6cf-9987-40ed-929e-615b255cc532\") " pod="openstack/nova-api-0"
Sep 30 12:40:08 crc kubenswrapper[5002]: I0930 12:40:08.034424 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/822ff6cf-9987-40ed-929e-615b255cc532-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"822ff6cf-9987-40ed-929e-615b255cc532\") " pod="openstack/nova-api-0"
Sep 30 12:40:08 crc kubenswrapper[5002]: I0930 12:40:08.034455 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/822ff6cf-9987-40ed-929e-615b255cc532-logs\") pod \"nova-api-0\" (UID: \"822ff6cf-9987-40ed-929e-615b255cc532\") " pod="openstack/nova-api-0"
Sep 30 12:40:08 crc kubenswrapper[5002]: I0930 12:40:08.034907 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/822ff6cf-9987-40ed-929e-615b255cc532-logs\") pod \"nova-api-0\" (UID: \"822ff6cf-9987-40ed-929e-615b255cc532\") " pod="openstack/nova-api-0"
Sep 30 12:40:08 crc kubenswrapper[5002]: I0930 12:40:08.037747 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/822ff6cf-9987-40ed-929e-615b255cc532-public-tls-certs\") pod \"nova-api-0\" (UID: \"822ff6cf-9987-40ed-929e-615b255cc532\") " pod="openstack/nova-api-0"
Sep 30 12:40:08 crc kubenswrapper[5002]: I0930 12:40:08.037863 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/822ff6cf-9987-40ed-929e-615b255cc532-internal-tls-certs\") pod \"nova-api-0\" (UID: \"822ff6cf-9987-40ed-929e-615b255cc532\") " pod="openstack/nova-api-0"
Sep 30 12:40:08 crc kubenswrapper[5002]: I0930 12:40:08.038159 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/822ff6cf-9987-40ed-929e-615b255cc532-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"822ff6cf-9987-40ed-929e-615b255cc532\") " pod="openstack/nova-api-0"
Sep 30 12:40:08 crc kubenswrapper[5002]: I0930 12:40:08.038646 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/822ff6cf-9987-40ed-929e-615b255cc532-config-data\") pod \"nova-api-0\" (UID: \"822ff6cf-9987-40ed-929e-615b255cc532\") " pod="openstack/nova-api-0"
Sep 30 12:40:08 crc kubenswrapper[5002]: I0930 12:40:08.052064 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mzw79\" (UniqueName: \"kubernetes.io/projected/822ff6cf-9987-40ed-929e-615b255cc532-kube-api-access-mzw79\") pod \"nova-api-0\" (UID: \"822ff6cf-9987-40ed-929e-615b255cc532\") " pod="openstack/nova-api-0"
Sep 30 12:40:08 crc kubenswrapper[5002]: I0930 12:40:08.196068 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Sep 30 12:40:08 crc kubenswrapper[5002]: I0930 12:40:08.705484 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d0d8b305-6208-4a2d-b131-62d912765c52" path="/var/lib/kubelet/pods/d0d8b305-6208-4a2d-b131-62d912765c52/volumes"
Sep 30 12:40:08 crc kubenswrapper[5002]: I0930 12:40:08.706780 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"]
Sep 30 12:40:08 crc kubenswrapper[5002]: I0930 12:40:08.839742 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"822ff6cf-9987-40ed-929e-615b255cc532","Type":"ContainerStarted","Data":"45b3318b059e4e93a0ee07696e577b9302554e21117c6e696f090f1d1023d268"}
Sep 30 12:40:08 crc kubenswrapper[5002]: I0930 12:40:08.844001 5002 generic.go:334] "Generic (PLEG): container finished" podID="41c5acb5-451a-46fe-8384-be7514e49ea9" containerID="ea585a3f09929f61b29d55fd2e68fe5bdb058d85d58893f6c8db15ac0460e776" exitCode=0
Sep 30 12:40:08 crc kubenswrapper[5002]: I0930 12:40:08.844062 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"41c5acb5-451a-46fe-8384-be7514e49ea9","Type":"ContainerDied","Data":"ea585a3f09929f61b29d55fd2e68fe5bdb058d85d58893f6c8db15ac0460e776"}
Sep 30 12:40:08 crc kubenswrapper[5002]: I0930 12:40:08.855513 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0"
Sep 30 12:40:08 crc kubenswrapper[5002]: I0930 12:40:08.954782 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/41c5acb5-451a-46fe-8384-be7514e49ea9-combined-ca-bundle\") pod \"41c5acb5-451a-46fe-8384-be7514e49ea9\" (UID: \"41c5acb5-451a-46fe-8384-be7514e49ea9\") "
Sep 30 12:40:08 crc kubenswrapper[5002]: I0930 12:40:08.955172 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/41c5acb5-451a-46fe-8384-be7514e49ea9-config-data\") pod \"41c5acb5-451a-46fe-8384-be7514e49ea9\" (UID: \"41c5acb5-451a-46fe-8384-be7514e49ea9\") "
Sep 30 12:40:08 crc kubenswrapper[5002]: I0930 12:40:08.955204 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9cpg7\" (UniqueName: \"kubernetes.io/projected/41c5acb5-451a-46fe-8384-be7514e49ea9-kube-api-access-9cpg7\") pod \"41c5acb5-451a-46fe-8384-be7514e49ea9\" (UID: \"41c5acb5-451a-46fe-8384-be7514e49ea9\") "
Sep 30 12:40:08 crc kubenswrapper[5002]: I0930 12:40:08.959413 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/41c5acb5-451a-46fe-8384-be7514e49ea9-kube-api-access-9cpg7" (OuterVolumeSpecName: "kube-api-access-9cpg7") pod "41c5acb5-451a-46fe-8384-be7514e49ea9" (UID: "41c5acb5-451a-46fe-8384-be7514e49ea9"). InnerVolumeSpecName "kube-api-access-9cpg7". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 12:40:08 crc kubenswrapper[5002]: I0930 12:40:08.984671 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/41c5acb5-451a-46fe-8384-be7514e49ea9-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "41c5acb5-451a-46fe-8384-be7514e49ea9" (UID: "41c5acb5-451a-46fe-8384-be7514e49ea9"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 12:40:08 crc kubenswrapper[5002]: I0930 12:40:08.994872 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/41c5acb5-451a-46fe-8384-be7514e49ea9-config-data" (OuterVolumeSpecName: "config-data") pod "41c5acb5-451a-46fe-8384-be7514e49ea9" (UID: "41c5acb5-451a-46fe-8384-be7514e49ea9"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 12:40:09 crc kubenswrapper[5002]: I0930 12:40:09.058491 5002 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/41c5acb5-451a-46fe-8384-be7514e49ea9-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Sep 30 12:40:09 crc kubenswrapper[5002]: I0930 12:40:09.058522 5002 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/41c5acb5-451a-46fe-8384-be7514e49ea9-config-data\") on node \"crc\" DevicePath \"\""
Sep 30 12:40:09 crc kubenswrapper[5002]: I0930 12:40:09.058532 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9cpg7\" (UniqueName: \"kubernetes.io/projected/41c5acb5-451a-46fe-8384-be7514e49ea9-kube-api-access-9cpg7\") on node \"crc\" DevicePath \"\""
Sep 30 12:40:09 crc kubenswrapper[5002]: I0930 12:40:09.852333 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"822ff6cf-9987-40ed-929e-615b255cc532","Type":"ContainerStarted","Data":"18e893aa547d66a2574b8e68ccaf40108856de0578e0810823b6b492704a5941"}
Sep 30 12:40:09 crc kubenswrapper[5002]: I0930 12:40:09.852959 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"822ff6cf-9987-40ed-929e-615b255cc532","Type":"ContainerStarted","Data":"a496b659322eeef05e33d235aa9f79e500568b4d887ac7a5960332e1cae3a17e"}
Sep 30 12:40:09 crc kubenswrapper[5002]: I0930 12:40:09.854650 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"41c5acb5-451a-46fe-8384-be7514e49ea9","Type":"ContainerDied","Data":"50bf6de3989792d742c856199b54dded5f311d34b1d18e2595320653b057dc7d"}
Sep 30 12:40:09 crc kubenswrapper[5002]: I0930 12:40:09.854686 5002 scope.go:117] "RemoveContainer" containerID="ea585a3f09929f61b29d55fd2e68fe5bdb058d85d58893f6c8db15ac0460e776"
Sep 30 12:40:09 crc kubenswrapper[5002]: I0930 12:40:09.854687 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0"
Sep 30 12:40:09 crc kubenswrapper[5002]: I0930 12:40:09.879974 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.8799534700000002 podStartE2EDuration="2.87995347s" podCreationTimestamp="2025-09-30 12:40:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 12:40:09.875543599 +0000 UTC m=+1184.125225775" watchObservedRunningTime="2025-09-30 12:40:09.87995347 +0000 UTC m=+1184.129635636"
Sep 30 12:40:09 crc kubenswrapper[5002]: I0930 12:40:09.898452 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"]
Sep 30 12:40:09 crc kubenswrapper[5002]: I0930 12:40:09.913901 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"]
Sep 30 12:40:09 crc kubenswrapper[5002]: I0930 12:40:09.924633 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"]
Sep 30 12:40:09 crc kubenswrapper[5002]: E0930 12:40:09.925275 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="41c5acb5-451a-46fe-8384-be7514e49ea9" containerName="nova-scheduler-scheduler"
Sep 30 12:40:09 crc kubenswrapper[5002]: I0930 12:40:09.925309 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="41c5acb5-451a-46fe-8384-be7514e49ea9" containerName="nova-scheduler-scheduler"
Sep 30 12:40:09 crc kubenswrapper[5002]: I0930 12:40:09.925636 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="41c5acb5-451a-46fe-8384-be7514e49ea9" containerName="nova-scheduler-scheduler"
Sep 30 12:40:09 crc kubenswrapper[5002]: I0930 12:40:09.926711 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0"
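The pod_startup_latency_tracker entry above reports podStartSLOduration for nova-api-0 as roughly 2.88 s, with both image-pull timestamps left at Go's zero value because no pull happened. The figure is simply watchObservedRunningTime minus podCreationTimestamp; a quick check of that arithmetic:

```python
from datetime import datetime, timezone

# Re-derive podStartSLOduration from the tracker entry above:
# watchObservedRunningTime (12:40:09.87995347) - podCreationTimestamp (12:40:07).
created = datetime(2025, 9, 30, 12, 40, 7, tzinfo=timezone.utc)
observed = datetime(2025, 9, 30, 12, 40, 9, 879953, tzinfo=timezone.utc)
print((observed - created).total_seconds())  # 2.879953, matching the logged value
```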
Sep 30 12:40:09 crc kubenswrapper[5002]: I0930 12:40:09.928837 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data"
Sep 30 12:40:09 crc kubenswrapper[5002]: I0930 12:40:09.940623 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"]
Sep 30 12:40:09 crc kubenswrapper[5002]: I0930 12:40:09.976460 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dd4640b7-43e4-4029-88fa-bb9c4a293794-config-data\") pod \"nova-scheduler-0\" (UID: \"dd4640b7-43e4-4029-88fa-bb9c4a293794\") " pod="openstack/nova-scheduler-0"
Sep 30 12:40:09 crc kubenswrapper[5002]: I0930 12:40:09.976695 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dd4640b7-43e4-4029-88fa-bb9c4a293794-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"dd4640b7-43e4-4029-88fa-bb9c4a293794\") " pod="openstack/nova-scheduler-0"
Sep 30 12:40:09 crc kubenswrapper[5002]: I0930 12:40:09.976768 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5cn8d\" (UniqueName: \"kubernetes.io/projected/dd4640b7-43e4-4029-88fa-bb9c4a293794-kube-api-access-5cn8d\") pod \"nova-scheduler-0\" (UID: \"dd4640b7-43e4-4029-88fa-bb9c4a293794\") " pod="openstack/nova-scheduler-0"
Sep 30 12:40:10 crc kubenswrapper[5002]: I0930 12:40:10.078420 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dd4640b7-43e4-4029-88fa-bb9c4a293794-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"dd4640b7-43e4-4029-88fa-bb9c4a293794\") " pod="openstack/nova-scheduler-0"
Sep 30 12:40:10 crc kubenswrapper[5002]: I0930 12:40:10.078569 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5cn8d\" (UniqueName: \"kubernetes.io/projected/dd4640b7-43e4-4029-88fa-bb9c4a293794-kube-api-access-5cn8d\") pod \"nova-scheduler-0\" (UID: \"dd4640b7-43e4-4029-88fa-bb9c4a293794\") " pod="openstack/nova-scheduler-0"
Sep 30 12:40:10 crc kubenswrapper[5002]: I0930 12:40:10.078673 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dd4640b7-43e4-4029-88fa-bb9c4a293794-config-data\") pod \"nova-scheduler-0\" (UID: \"dd4640b7-43e4-4029-88fa-bb9c4a293794\") " pod="openstack/nova-scheduler-0"
Sep 30 12:40:10 crc kubenswrapper[5002]: I0930 12:40:10.083908 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dd4640b7-43e4-4029-88fa-bb9c4a293794-config-data\") pod \"nova-scheduler-0\" (UID: \"dd4640b7-43e4-4029-88fa-bb9c4a293794\") " pod="openstack/nova-scheduler-0"
Sep 30 12:40:10 crc kubenswrapper[5002]: I0930 12:40:10.084114 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dd4640b7-43e4-4029-88fa-bb9c4a293794-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"dd4640b7-43e4-4029-88fa-bb9c4a293794\") " pod="openstack/nova-scheduler-0"
Sep 30 12:40:10 crc kubenswrapper[5002]: I0930 12:40:10.102172 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5cn8d\" (UniqueName: \"kubernetes.io/projected/dd4640b7-43e4-4029-88fa-bb9c4a293794-kube-api-access-5cn8d\") pod \"nova-scheduler-0\" (UID: \"dd4640b7-43e4-4029-88fa-bb9c4a293794\") " pod="openstack/nova-scheduler-0"
Sep 30 12:40:10 crc kubenswrapper[5002]: I0930 12:40:10.163220 5002 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="b23ec22e-90c8-41b7-bdc8-4861c7ec9c83" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.195:8775/\": read tcp 10.217.0.2:55120->10.217.0.195:8775: read: connection reset by peer"
Sep 30 12:40:10 crc kubenswrapper[5002]: I0930 12:40:10.163422 5002 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="b23ec22e-90c8-41b7-bdc8-4861c7ec9c83" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.195:8775/\": read tcp 10.217.0.2:55124->10.217.0.195:8775: read: connection reset by peer"
Sep 30 12:40:10 crc kubenswrapper[5002]: I0930 12:40:10.250333 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0"
Sep 30 12:40:10 crc kubenswrapper[5002]: I0930 12:40:10.655488 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Sep 30 12:40:10 crc kubenswrapper[5002]: I0930 12:40:10.695921 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="41c5acb5-451a-46fe-8384-be7514e49ea9" path="/var/lib/kubelet/pods/41c5acb5-451a-46fe-8384-be7514e49ea9/volumes"
Sep 30 12:40:10 crc kubenswrapper[5002]: I0930 12:40:10.790863 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rtk9x\" (UniqueName: \"kubernetes.io/projected/b23ec22e-90c8-41b7-bdc8-4861c7ec9c83-kube-api-access-rtk9x\") pod \"b23ec22e-90c8-41b7-bdc8-4861c7ec9c83\" (UID: \"b23ec22e-90c8-41b7-bdc8-4861c7ec9c83\") "
Sep 30 12:40:10 crc kubenswrapper[5002]: I0930 12:40:10.791620 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/b23ec22e-90c8-41b7-bdc8-4861c7ec9c83-nova-metadata-tls-certs\") pod \"b23ec22e-90c8-41b7-bdc8-4861c7ec9c83\" (UID: \"b23ec22e-90c8-41b7-bdc8-4861c7ec9c83\") "
Sep 30 12:40:10 crc kubenswrapper[5002]: I0930 12:40:10.791737 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b23ec22e-90c8-41b7-bdc8-4861c7ec9c83-logs\") pod \"b23ec22e-90c8-41b7-bdc8-4861c7ec9c83\" (UID: \"b23ec22e-90c8-41b7-bdc8-4861c7ec9c83\") "
Sep 30 12:40:10 crc kubenswrapper[5002]: I0930 12:40:10.792152 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b23ec22e-90c8-41b7-bdc8-4861c7ec9c83-logs" (OuterVolumeSpecName: "logs") pod "b23ec22e-90c8-41b7-bdc8-4861c7ec9c83" (UID: "b23ec22e-90c8-41b7-bdc8-4861c7ec9c83"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 12:40:10 crc kubenswrapper[5002]: I0930 12:40:10.792231 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b23ec22e-90c8-41b7-bdc8-4861c7ec9c83-config-data\") pod \"b23ec22e-90c8-41b7-bdc8-4861c7ec9c83\" (UID: \"b23ec22e-90c8-41b7-bdc8-4861c7ec9c83\") " Sep 30 12:40:10 crc kubenswrapper[5002]: I0930 12:40:10.792257 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b23ec22e-90c8-41b7-bdc8-4861c7ec9c83-combined-ca-bundle\") pod \"b23ec22e-90c8-41b7-bdc8-4861c7ec9c83\" (UID: \"b23ec22e-90c8-41b7-bdc8-4861c7ec9c83\") " Sep 30 12:40:10 crc kubenswrapper[5002]: I0930 12:40:10.792669 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Sep 30 12:40:10 crc kubenswrapper[5002]: I0930 12:40:10.792924 5002 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b23ec22e-90c8-41b7-bdc8-4861c7ec9c83-logs\") on node \"crc\" DevicePath \"\"" Sep 30 12:40:10 crc kubenswrapper[5002]: W0930 12:40:10.795690 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poddd4640b7_43e4_4029_88fa_bb9c4a293794.slice/crio-b5939d7ec692630761e9c9e086c049b556a3b438f03023025acc29c2aba34b51 WatchSource:0}: Error finding container b5939d7ec692630761e9c9e086c049b556a3b438f03023025acc29c2aba34b51: Status 404 returned error can't find the container with id b5939d7ec692630761e9c9e086c049b556a3b438f03023025acc29c2aba34b51 Sep 30 12:40:10 crc kubenswrapper[5002]: I0930 12:40:10.797981 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b23ec22e-90c8-41b7-bdc8-4861c7ec9c83-kube-api-access-rtk9x" (OuterVolumeSpecName: "kube-api-access-rtk9x") pod "b23ec22e-90c8-41b7-bdc8-4861c7ec9c83" (UID: "b23ec22e-90c8-41b7-bdc8-4861c7ec9c83"). InnerVolumeSpecName "kube-api-access-rtk9x". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 12:40:10 crc kubenswrapper[5002]: I0930 12:40:10.823886 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b23ec22e-90c8-41b7-bdc8-4861c7ec9c83-config-data" (OuterVolumeSpecName: "config-data") pod "b23ec22e-90c8-41b7-bdc8-4861c7ec9c83" (UID: "b23ec22e-90c8-41b7-bdc8-4861c7ec9c83"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:40:10 crc kubenswrapper[5002]: I0930 12:40:10.848423 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b23ec22e-90c8-41b7-bdc8-4861c7ec9c83-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b23ec22e-90c8-41b7-bdc8-4861c7ec9c83" (UID: "b23ec22e-90c8-41b7-bdc8-4861c7ec9c83"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:40:10 crc kubenswrapper[5002]: I0930 12:40:10.852921 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b23ec22e-90c8-41b7-bdc8-4861c7ec9c83-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "b23ec22e-90c8-41b7-bdc8-4861c7ec9c83" (UID: "b23ec22e-90c8-41b7-bdc8-4861c7ec9c83"). InnerVolumeSpecName "nova-metadata-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:40:10 crc kubenswrapper[5002]: I0930 12:40:10.867634 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"dd4640b7-43e4-4029-88fa-bb9c4a293794","Type":"ContainerStarted","Data":"b5939d7ec692630761e9c9e086c049b556a3b438f03023025acc29c2aba34b51"} Sep 30 12:40:10 crc kubenswrapper[5002]: I0930 12:40:10.869675 5002 generic.go:334] "Generic (PLEG): container finished" podID="b23ec22e-90c8-41b7-bdc8-4861c7ec9c83" containerID="07a85959d4119add40b7fa627c82be82afc4eca869d90b2ae9adee1e50c283ef" exitCode=0 Sep 30 12:40:10 crc kubenswrapper[5002]: I0930 12:40:10.869734 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"b23ec22e-90c8-41b7-bdc8-4861c7ec9c83","Type":"ContainerDied","Data":"07a85959d4119add40b7fa627c82be82afc4eca869d90b2ae9adee1e50c283ef"} Sep 30 12:40:10 crc kubenswrapper[5002]: I0930 12:40:10.869755 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"b23ec22e-90c8-41b7-bdc8-4861c7ec9c83","Type":"ContainerDied","Data":"c8a4e29eeaa937bc31908adaa6785f14434ecc16cc601d003ef174de8fe6b7a4"} Sep 30 12:40:10 crc kubenswrapper[5002]: I0930 12:40:10.869773 5002 scope.go:117] "RemoveContainer" containerID="07a85959d4119add40b7fa627c82be82afc4eca869d90b2ae9adee1e50c283ef" Sep 30 12:40:10 crc kubenswrapper[5002]: I0930 12:40:10.869877 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Sep 30 12:40:10 crc kubenswrapper[5002]: I0930 12:40:10.896714 5002 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b23ec22e-90c8-41b7-bdc8-4861c7ec9c83-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 12:40:10 crc kubenswrapper[5002]: I0930 12:40:10.896749 5002 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b23ec22e-90c8-41b7-bdc8-4861c7ec9c83-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 12:40:10 crc kubenswrapper[5002]: I0930 12:40:10.896765 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rtk9x\" (UniqueName: \"kubernetes.io/projected/b23ec22e-90c8-41b7-bdc8-4861c7ec9c83-kube-api-access-rtk9x\") on node \"crc\" DevicePath \"\"" Sep 30 12:40:10 crc kubenswrapper[5002]: I0930 12:40:10.896781 5002 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/b23ec22e-90c8-41b7-bdc8-4861c7ec9c83-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 30 12:40:10 crc kubenswrapper[5002]: I0930 12:40:10.916164 5002 scope.go:117] "RemoveContainer" containerID="8522f43ae6027cb6e0fbd468460668db4682d9c645ae511d039a53059ab12116" Sep 30 12:40:10 crc kubenswrapper[5002]: I0930 12:40:10.916254 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Sep 30 12:40:10 crc kubenswrapper[5002]: I0930 12:40:10.933122 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Sep 30 12:40:10 crc kubenswrapper[5002]: I0930 12:40:10.942519 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Sep 30 12:40:10 crc kubenswrapper[5002]: I0930 12:40:10.948913 5002 scope.go:117] "RemoveContainer" containerID="07a85959d4119add40b7fa627c82be82afc4eca869d90b2ae9adee1e50c283ef" Sep 30 12:40:10 crc kubenswrapper[5002]: E0930 12:40:10.949439 5002 log.go:32] 
"ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"07a85959d4119add40b7fa627c82be82afc4eca869d90b2ae9adee1e50c283ef\": container with ID starting with 07a85959d4119add40b7fa627c82be82afc4eca869d90b2ae9adee1e50c283ef not found: ID does not exist" containerID="07a85959d4119add40b7fa627c82be82afc4eca869d90b2ae9adee1e50c283ef" Sep 30 12:40:10 crc kubenswrapper[5002]: I0930 12:40:10.949525 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"07a85959d4119add40b7fa627c82be82afc4eca869d90b2ae9adee1e50c283ef"} err="failed to get container status \"07a85959d4119add40b7fa627c82be82afc4eca869d90b2ae9adee1e50c283ef\": rpc error: code = NotFound desc = could not find container \"07a85959d4119add40b7fa627c82be82afc4eca869d90b2ae9adee1e50c283ef\": container with ID starting with 07a85959d4119add40b7fa627c82be82afc4eca869d90b2ae9adee1e50c283ef not found: ID does not exist" Sep 30 12:40:10 crc kubenswrapper[5002]: I0930 12:40:10.949580 5002 scope.go:117] "RemoveContainer" containerID="8522f43ae6027cb6e0fbd468460668db4682d9c645ae511d039a53059ab12116" Sep 30 12:40:10 crc kubenswrapper[5002]: E0930 12:40:10.949886 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8522f43ae6027cb6e0fbd468460668db4682d9c645ae511d039a53059ab12116\": container with ID starting with 8522f43ae6027cb6e0fbd468460668db4682d9c645ae511d039a53059ab12116 not found: ID does not exist" containerID="8522f43ae6027cb6e0fbd468460668db4682d9c645ae511d039a53059ab12116" Sep 30 12:40:10 crc kubenswrapper[5002]: I0930 12:40:10.949916 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8522f43ae6027cb6e0fbd468460668db4682d9c645ae511d039a53059ab12116"} err="failed to get container status \"8522f43ae6027cb6e0fbd468460668db4682d9c645ae511d039a53059ab12116\": rpc error: code = NotFound desc = could not find container \"8522f43ae6027cb6e0fbd468460668db4682d9c645ae511d039a53059ab12116\": container with ID starting with 8522f43ae6027cb6e0fbd468460668db4682d9c645ae511d039a53059ab12116 not found: ID does not exist" Sep 30 12:40:10 crc kubenswrapper[5002]: E0930 12:40:10.974504 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b23ec22e-90c8-41b7-bdc8-4861c7ec9c83" containerName="nova-metadata-log" Sep 30 12:40:10 crc kubenswrapper[5002]: I0930 12:40:10.974570 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="b23ec22e-90c8-41b7-bdc8-4861c7ec9c83" containerName="nova-metadata-log" Sep 30 12:40:10 crc kubenswrapper[5002]: E0930 12:40:10.974601 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b23ec22e-90c8-41b7-bdc8-4861c7ec9c83" containerName="nova-metadata-metadata" Sep 30 12:40:10 crc kubenswrapper[5002]: I0930 12:40:10.974610 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="b23ec22e-90c8-41b7-bdc8-4861c7ec9c83" containerName="nova-metadata-metadata" Sep 30 12:40:10 crc kubenswrapper[5002]: I0930 12:40:10.975520 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="b23ec22e-90c8-41b7-bdc8-4861c7ec9c83" containerName="nova-metadata-log" Sep 30 12:40:10 crc kubenswrapper[5002]: I0930 12:40:10.975552 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="b23ec22e-90c8-41b7-bdc8-4861c7ec9c83" containerName="nova-metadata-metadata" Sep 30 12:40:10 crc kubenswrapper[5002]: I0930 12:40:10.977762 5002 kubelet.go:2428] "SyncLoop UPDATE" 
source="api" pods=["openstack/nova-metadata-0"] Sep 30 12:40:10 crc kubenswrapper[5002]: I0930 12:40:10.977884 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Sep 30 12:40:10 crc kubenswrapper[5002]: I0930 12:40:10.980921 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Sep 30 12:40:10 crc kubenswrapper[5002]: I0930 12:40:10.981137 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Sep 30 12:40:11 crc kubenswrapper[5002]: I0930 12:40:11.100446 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cc36d9f0-c2f5-463f-8f7b-3824c9bd9629-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"cc36d9f0-c2f5-463f-8f7b-3824c9bd9629\") " pod="openstack/nova-metadata-0" Sep 30 12:40:11 crc kubenswrapper[5002]: I0930 12:40:11.100560 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/cc36d9f0-c2f5-463f-8f7b-3824c9bd9629-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"cc36d9f0-c2f5-463f-8f7b-3824c9bd9629\") " pod="openstack/nova-metadata-0" Sep 30 12:40:11 crc kubenswrapper[5002]: I0930 12:40:11.100597 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-db7nt\" (UniqueName: \"kubernetes.io/projected/cc36d9f0-c2f5-463f-8f7b-3824c9bd9629-kube-api-access-db7nt\") pod \"nova-metadata-0\" (UID: \"cc36d9f0-c2f5-463f-8f7b-3824c9bd9629\") " pod="openstack/nova-metadata-0" Sep 30 12:40:11 crc kubenswrapper[5002]: I0930 12:40:11.100654 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cc36d9f0-c2f5-463f-8f7b-3824c9bd9629-logs\") pod \"nova-metadata-0\" (UID: \"cc36d9f0-c2f5-463f-8f7b-3824c9bd9629\") " pod="openstack/nova-metadata-0" Sep 30 12:40:11 crc kubenswrapper[5002]: I0930 12:40:11.100730 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cc36d9f0-c2f5-463f-8f7b-3824c9bd9629-config-data\") pod \"nova-metadata-0\" (UID: \"cc36d9f0-c2f5-463f-8f7b-3824c9bd9629\") " pod="openstack/nova-metadata-0" Sep 30 12:40:11 crc kubenswrapper[5002]: I0930 12:40:11.202722 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cc36d9f0-c2f5-463f-8f7b-3824c9bd9629-config-data\") pod \"nova-metadata-0\" (UID: \"cc36d9f0-c2f5-463f-8f7b-3824c9bd9629\") " pod="openstack/nova-metadata-0" Sep 30 12:40:11 crc kubenswrapper[5002]: I0930 12:40:11.203107 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cc36d9f0-c2f5-463f-8f7b-3824c9bd9629-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"cc36d9f0-c2f5-463f-8f7b-3824c9bd9629\") " pod="openstack/nova-metadata-0" Sep 30 12:40:11 crc kubenswrapper[5002]: I0930 12:40:11.203135 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/cc36d9f0-c2f5-463f-8f7b-3824c9bd9629-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"cc36d9f0-c2f5-463f-8f7b-3824c9bd9629\") " 
pod="openstack/nova-metadata-0" Sep 30 12:40:11 crc kubenswrapper[5002]: I0930 12:40:11.203167 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-db7nt\" (UniqueName: \"kubernetes.io/projected/cc36d9f0-c2f5-463f-8f7b-3824c9bd9629-kube-api-access-db7nt\") pod \"nova-metadata-0\" (UID: \"cc36d9f0-c2f5-463f-8f7b-3824c9bd9629\") " pod="openstack/nova-metadata-0" Sep 30 12:40:11 crc kubenswrapper[5002]: I0930 12:40:11.203226 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cc36d9f0-c2f5-463f-8f7b-3824c9bd9629-logs\") pod \"nova-metadata-0\" (UID: \"cc36d9f0-c2f5-463f-8f7b-3824c9bd9629\") " pod="openstack/nova-metadata-0" Sep 30 12:40:11 crc kubenswrapper[5002]: I0930 12:40:11.203576 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cc36d9f0-c2f5-463f-8f7b-3824c9bd9629-logs\") pod \"nova-metadata-0\" (UID: \"cc36d9f0-c2f5-463f-8f7b-3824c9bd9629\") " pod="openstack/nova-metadata-0" Sep 30 12:40:11 crc kubenswrapper[5002]: I0930 12:40:11.206680 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/cc36d9f0-c2f5-463f-8f7b-3824c9bd9629-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"cc36d9f0-c2f5-463f-8f7b-3824c9bd9629\") " pod="openstack/nova-metadata-0" Sep 30 12:40:11 crc kubenswrapper[5002]: I0930 12:40:11.206736 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cc36d9f0-c2f5-463f-8f7b-3824c9bd9629-config-data\") pod \"nova-metadata-0\" (UID: \"cc36d9f0-c2f5-463f-8f7b-3824c9bd9629\") " pod="openstack/nova-metadata-0" Sep 30 12:40:11 crc kubenswrapper[5002]: I0930 12:40:11.208020 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cc36d9f0-c2f5-463f-8f7b-3824c9bd9629-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"cc36d9f0-c2f5-463f-8f7b-3824c9bd9629\") " pod="openstack/nova-metadata-0" Sep 30 12:40:11 crc kubenswrapper[5002]: I0930 12:40:11.219899 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-db7nt\" (UniqueName: \"kubernetes.io/projected/cc36d9f0-c2f5-463f-8f7b-3824c9bd9629-kube-api-access-db7nt\") pod \"nova-metadata-0\" (UID: \"cc36d9f0-c2f5-463f-8f7b-3824c9bd9629\") " pod="openstack/nova-metadata-0" Sep 30 12:40:11 crc kubenswrapper[5002]: I0930 12:40:11.299938 5002 util.go:30] "No sandbox for pod can be found. 
Sep 30 12:40:11 crc kubenswrapper[5002]: I0930 12:40:11.748524 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"]
Sep 30 12:40:11 crc kubenswrapper[5002]: W0930 12:40:11.752924 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podcc36d9f0_c2f5_463f_8f7b_3824c9bd9629.slice/crio-3c5cd6863c47b681c5800aec78f2532cb7a7f3b3e4f589092dd684c9ecdd1971 WatchSource:0}: Error finding container 3c5cd6863c47b681c5800aec78f2532cb7a7f3b3e4f589092dd684c9ecdd1971: Status 404 returned error can't find the container with id 3c5cd6863c47b681c5800aec78f2532cb7a7f3b3e4f589092dd684c9ecdd1971
Sep 30 12:40:11 crc kubenswrapper[5002]: I0930 12:40:11.883466 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"cc36d9f0-c2f5-463f-8f7b-3824c9bd9629","Type":"ContainerStarted","Data":"3c5cd6863c47b681c5800aec78f2532cb7a7f3b3e4f589092dd684c9ecdd1971"}
Sep 30 12:40:11 crc kubenswrapper[5002]: I0930 12:40:11.886678 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"dd4640b7-43e4-4029-88fa-bb9c4a293794","Type":"ContainerStarted","Data":"a48d6f69d9292f8b3867ba987279639e41fac089e075309a832fb0fecdab3db8"}
Sep 30 12:40:11 crc kubenswrapper[5002]: I0930 12:40:11.912058 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.912040862 podStartE2EDuration="2.912040862s" podCreationTimestamp="2025-09-30 12:40:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 12:40:11.904615796 +0000 UTC m=+1186.154297952" watchObservedRunningTime="2025-09-30 12:40:11.912040862 +0000 UTC m=+1186.161723008"
Sep 30 12:40:12 crc kubenswrapper[5002]: I0930 12:40:12.692435 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b23ec22e-90c8-41b7-bdc8-4861c7ec9c83" path="/var/lib/kubelet/pods/b23ec22e-90c8-41b7-bdc8-4861c7ec9c83/volumes"
Sep 30 12:40:12 crc kubenswrapper[5002]: I0930 12:40:12.898006 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"cc36d9f0-c2f5-463f-8f7b-3824c9bd9629","Type":"ContainerStarted","Data":"bf32b44c0e0fbd910f8140e216424c25001f06b554e691ecf49d3b9e8796159c"}
Sep 30 12:40:12 crc kubenswrapper[5002]: I0930 12:40:12.898083 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"cc36d9f0-c2f5-463f-8f7b-3824c9bd9629","Type":"ContainerStarted","Data":"b7b28df8000ef4f1e9dc9dd6425d4ddcea3279393229c405c1429f86d269125b"}
Sep 30 12:40:12 crc kubenswrapper[5002]: I0930 12:40:12.940044 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.940016122 podStartE2EDuration="2.940016122s" podCreationTimestamp="2025-09-30 12:40:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 12:40:12.923138506 +0000 UTC m=+1187.172820732" watchObservedRunningTime="2025-09-30 12:40:12.940016122 +0000 UTC m=+1187.189698308"
Sep 30 12:40:15 crc kubenswrapper[5002]: I0930 12:40:15.250933 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0"
Sep 30 12:40:16 crc kubenswrapper[5002]: I0930 12:40:16.300103 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0"
Sep 30 12:40:16 crc kubenswrapper[5002]: I0930 12:40:16.300161 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0"
Sep 30 12:40:18 crc kubenswrapper[5002]: I0930 12:40:18.197042 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0"
Sep 30 12:40:18 crc kubenswrapper[5002]: I0930 12:40:18.197648 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0"
Sep 30 12:40:19 crc kubenswrapper[5002]: I0930 12:40:19.217660 5002 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="822ff6cf-9987-40ed-929e-615b255cc532" containerName="nova-api-log" probeResult="failure" output="Get \"https://10.217.0.207:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)"
Sep 30 12:40:19 crc kubenswrapper[5002]: I0930 12:40:19.217696 5002 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="822ff6cf-9987-40ed-929e-615b255cc532" containerName="nova-api-api" probeResult="failure" output="Get \"https://10.217.0.207:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)"
Sep 30 12:40:20 crc kubenswrapper[5002]: I0930 12:40:20.251396 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0"
Sep 30 12:40:20 crc kubenswrapper[5002]: I0930 12:40:20.281257 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0"
Sep 30 12:40:21 crc kubenswrapper[5002]: I0930 12:40:21.001866 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0"
Sep 30 12:40:21 crc kubenswrapper[5002]: I0930 12:40:21.300504 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0"
Sep 30 12:40:21 crc kubenswrapper[5002]: I0930 12:40:21.300545 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0"
Sep 30 12:40:22 crc kubenswrapper[5002]: I0930 12:40:22.316720 5002 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="cc36d9f0-c2f5-463f-8f7b-3824c9bd9629" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.209:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)"
Sep 30 12:40:22 crc kubenswrapper[5002]: I0930 12:40:22.316731 5002 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="cc36d9f0-c2f5-463f-8f7b-3824c9bd9629" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.209:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)"
Sep 30 12:40:27 crc kubenswrapper[5002]: I0930 12:40:27.106141 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0"
Sep 30 12:40:28 crc kubenswrapper[5002]: I0930 12:40:28.214855 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0"
Sep 30 12:40:28 crc kubenswrapper[5002]: I0930 12:40:28.215293 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0"
Sep 30 12:40:28 crc kubenswrapper[5002]: I0930 12:40:28.224392 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0"
Sep 30 12:40:28 crc kubenswrapper[5002]: I0930 12:40:28.225051 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0"
Sep 30 12:40:29 crc kubenswrapper[5002]: I0930 12:40:29.051067 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0"
Sep 30 12:40:29 crc kubenswrapper[5002]: I0930 12:40:29.056720 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0"
Sep 30 12:40:31 crc kubenswrapper[5002]: I0930 12:40:31.308370 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0"
Sep 30 12:40:31 crc kubenswrapper[5002]: I0930 12:40:31.309167 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0"
Sep 30 12:40:31 crc kubenswrapper[5002]: I0930 12:40:31.317335 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0"
Sep 30 12:40:31 crc kubenswrapper[5002]: I0930 12:40:31.318000 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0"
Sep 30 12:40:39 crc kubenswrapper[5002]: I0930 12:40:39.341818 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"]
Sep 30 12:40:40 crc kubenswrapper[5002]: I0930 12:40:40.225608 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"]
Sep 30 12:40:43 crc kubenswrapper[5002]: I0930 12:40:43.864917 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-server-0" podUID="f80ae063-860a-4997-9c9f-57bc3a850e37" containerName="rabbitmq" containerID="cri-o://3ddabafa91a278a090d2fcd07f8418fee0a2ef9a8b6d78e23fa0832bc69d145c" gracePeriod=604796
Sep 30 12:40:44 crc kubenswrapper[5002]: I0930 12:40:44.457405 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-cell1-server-0" podUID="26e7b906-c14a-4084-926c-2d2c7ce201be" containerName="rabbitmq" containerID="cri-o://103645d4b02ad1c1e03f2de395b434a5b876e573ea9f8681428473bc48a29e93" gracePeriod=604796
Sep 30 12:40:44 crc kubenswrapper[5002]: I0930 12:40:44.952039 5002 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-cell1-server-0" podUID="26e7b906-c14a-4084-926c-2d2c7ce201be" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.100:5671: connect: connection refused"
Sep 30 12:40:45 crc kubenswrapper[5002]: I0930 12:40:45.262760 5002 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-server-0" podUID="f80ae063-860a-4997-9c9f-57bc3a850e37" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.101:5671: connect: connection refused"
Sep 30 12:40:50 crc kubenswrapper[5002]: I0930 12:40:50.273219 5002 generic.go:334] "Generic (PLEG): container finished" podID="f80ae063-860a-4997-9c9f-57bc3a850e37" containerID="3ddabafa91a278a090d2fcd07f8418fee0a2ef9a8b6d78e23fa0832bc69d145c" exitCode=0
Sep 30 12:40:50 crc kubenswrapper[5002]: I0930 12:40:50.273278 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"f80ae063-860a-4997-9c9f-57bc3a850e37","Type":"ContainerDied","Data":"3ddabafa91a278a090d2fcd07f8418fee0a2ef9a8b6d78e23fa0832bc69d145c"}
Sep 30 12:40:50 crc kubenswrapper[5002]: I0930 12:40:50.559690 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0"
Sep 30 12:40:50 crc kubenswrapper[5002]: I0930 12:40:50.677346 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/f80ae063-860a-4997-9c9f-57bc3a850e37-rabbitmq-plugins\") pod \"f80ae063-860a-4997-9c9f-57bc3a850e37\" (UID: \"f80ae063-860a-4997-9c9f-57bc3a850e37\") "
Sep 30 12:40:50 crc kubenswrapper[5002]: I0930 12:40:50.677453 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v5gmm\" (UniqueName: \"kubernetes.io/projected/f80ae063-860a-4997-9c9f-57bc3a850e37-kube-api-access-v5gmm\") pod \"f80ae063-860a-4997-9c9f-57bc3a850e37\" (UID: \"f80ae063-860a-4997-9c9f-57bc3a850e37\") "
Sep 30 12:40:50 crc kubenswrapper[5002]: I0930 12:40:50.677508 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/f80ae063-860a-4997-9c9f-57bc3a850e37-pod-info\") pod \"f80ae063-860a-4997-9c9f-57bc3a850e37\" (UID: \"f80ae063-860a-4997-9c9f-57bc3a850e37\") "
Sep 30 12:40:50 crc kubenswrapper[5002]: I0930 12:40:50.677574 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/f80ae063-860a-4997-9c9f-57bc3a850e37-rabbitmq-confd\") pod \"f80ae063-860a-4997-9c9f-57bc3a850e37\" (UID: \"f80ae063-860a-4997-9c9f-57bc3a850e37\") "
Sep 30 12:40:50 crc kubenswrapper[5002]: I0930 12:40:50.677618 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/f80ae063-860a-4997-9c9f-57bc3a850e37-server-conf\") pod \"f80ae063-860a-4997-9c9f-57bc3a850e37\" (UID: \"f80ae063-860a-4997-9c9f-57bc3a850e37\") "
Sep 30 12:40:50 crc kubenswrapper[5002]: I0930 12:40:50.677658 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/f80ae063-860a-4997-9c9f-57bc3a850e37-rabbitmq-tls\") pod \"f80ae063-860a-4997-9c9f-57bc3a850e37\" (UID: \"f80ae063-860a-4997-9c9f-57bc3a850e37\") "
Sep 30 12:40:50 crc kubenswrapper[5002]: I0930 12:40:50.677678 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"f80ae063-860a-4997-9c9f-57bc3a850e37\" (UID: \"f80ae063-860a-4997-9c9f-57bc3a850e37\") "
Sep 30 12:40:50 crc kubenswrapper[5002]: I0930 12:40:50.677717 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/f80ae063-860a-4997-9c9f-57bc3a850e37-erlang-cookie-secret\") pod \"f80ae063-860a-4997-9c9f-57bc3a850e37\" (UID: \"f80ae063-860a-4997-9c9f-57bc3a850e37\") "
Sep 30 12:40:50 crc kubenswrapper[5002]: I0930 12:40:50.677764 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/f80ae063-860a-4997-9c9f-57bc3a850e37-config-data\") pod \"f80ae063-860a-4997-9c9f-57bc3a850e37\" (UID: \"f80ae063-860a-4997-9c9f-57bc3a850e37\") "
Sep 30 12:40:50 crc kubenswrapper[5002]: I0930 12:40:50.677806 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/f80ae063-860a-4997-9c9f-57bc3a850e37-plugins-conf\") pod \"f80ae063-860a-4997-9c9f-57bc3a850e37\" (UID: \"f80ae063-860a-4997-9c9f-57bc3a850e37\") "
Sep 30 12:40:50 crc kubenswrapper[5002]: I0930 12:40:50.677870 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/f80ae063-860a-4997-9c9f-57bc3a850e37-rabbitmq-erlang-cookie\") pod \"f80ae063-860a-4997-9c9f-57bc3a850e37\" (UID: \"f80ae063-860a-4997-9c9f-57bc3a850e37\") "
Sep 30 12:40:50 crc kubenswrapper[5002]: I0930 12:40:50.678881 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f80ae063-860a-4997-9c9f-57bc3a850e37-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "f80ae063-860a-4997-9c9f-57bc3a850e37" (UID: "f80ae063-860a-4997-9c9f-57bc3a850e37"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 30 12:40:50 crc kubenswrapper[5002]: I0930 12:40:50.681858 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f80ae063-860a-4997-9c9f-57bc3a850e37-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "f80ae063-860a-4997-9c9f-57bc3a850e37" (UID: "f80ae063-860a-4997-9c9f-57bc3a850e37"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 30 12:40:50 crc kubenswrapper[5002]: I0930 12:40:50.682205 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f80ae063-860a-4997-9c9f-57bc3a850e37-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "f80ae063-860a-4997-9c9f-57bc3a850e37" (UID: "f80ae063-860a-4997-9c9f-57bc3a850e37"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 30 12:40:50 crc kubenswrapper[5002]: I0930 12:40:50.690358 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage01-crc" (OuterVolumeSpecName: "persistence") pod "f80ae063-860a-4997-9c9f-57bc3a850e37" (UID: "f80ae063-860a-4997-9c9f-57bc3a850e37"). InnerVolumeSpecName "local-storage01-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue ""
Sep 30 12:40:50 crc kubenswrapper[5002]: I0930 12:40:50.692621 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f80ae063-860a-4997-9c9f-57bc3a850e37-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "f80ae063-860a-4997-9c9f-57bc3a850e37" (UID: "f80ae063-860a-4997-9c9f-57bc3a850e37"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 12:40:50 crc kubenswrapper[5002]: I0930 12:40:50.700656 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/f80ae063-860a-4997-9c9f-57bc3a850e37-pod-info" (OuterVolumeSpecName: "pod-info") pod "f80ae063-860a-4997-9c9f-57bc3a850e37" (UID: "f80ae063-860a-4997-9c9f-57bc3a850e37"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue ""
Sep 30 12:40:50 crc kubenswrapper[5002]: I0930 12:40:50.700760 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f80ae063-860a-4997-9c9f-57bc3a850e37-kube-api-access-v5gmm" (OuterVolumeSpecName: "kube-api-access-v5gmm") pod "f80ae063-860a-4997-9c9f-57bc3a850e37" (UID: "f80ae063-860a-4997-9c9f-57bc3a850e37"). InnerVolumeSpecName "kube-api-access-v5gmm". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 12:40:50 crc kubenswrapper[5002]: I0930 12:40:50.715925 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f80ae063-860a-4997-9c9f-57bc3a850e37-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "f80ae063-860a-4997-9c9f-57bc3a850e37" (UID: "f80ae063-860a-4997-9c9f-57bc3a850e37"). InnerVolumeSpecName "rabbitmq-tls". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 12:40:50 crc kubenswrapper[5002]: I0930 12:40:50.731175 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f80ae063-860a-4997-9c9f-57bc3a850e37-config-data" (OuterVolumeSpecName: "config-data") pod "f80ae063-860a-4997-9c9f-57bc3a850e37" (UID: "f80ae063-860a-4997-9c9f-57bc3a850e37"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 30 12:40:50 crc kubenswrapper[5002]: I0930 12:40:50.747884 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f80ae063-860a-4997-9c9f-57bc3a850e37-server-conf" (OuterVolumeSpecName: "server-conf") pod "f80ae063-860a-4997-9c9f-57bc3a850e37" (UID: "f80ae063-860a-4997-9c9f-57bc3a850e37"). InnerVolumeSpecName "server-conf". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 30 12:40:50 crc kubenswrapper[5002]: I0930 12:40:50.781236 5002 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/f80ae063-860a-4997-9c9f-57bc3a850e37-config-data\") on node \"crc\" DevicePath \"\""
Sep 30 12:40:50 crc kubenswrapper[5002]: I0930 12:40:50.781295 5002 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/f80ae063-860a-4997-9c9f-57bc3a850e37-plugins-conf\") on node \"crc\" DevicePath \"\""
Sep 30 12:40:50 crc kubenswrapper[5002]: I0930 12:40:50.781307 5002 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/f80ae063-860a-4997-9c9f-57bc3a850e37-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\""
Sep 30 12:40:50 crc kubenswrapper[5002]: I0930 12:40:50.781316 5002 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/f80ae063-860a-4997-9c9f-57bc3a850e37-rabbitmq-plugins\") on node \"crc\" DevicePath \"\""
Sep 30 12:40:50 crc kubenswrapper[5002]: I0930 12:40:50.781325 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v5gmm\" (UniqueName: \"kubernetes.io/projected/f80ae063-860a-4997-9c9f-57bc3a850e37-kube-api-access-v5gmm\") on node \"crc\" DevicePath \"\""
Sep 30 12:40:50 crc kubenswrapper[5002]: I0930 12:40:50.781353 5002 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/f80ae063-860a-4997-9c9f-57bc3a850e37-pod-info\") on node \"crc\" DevicePath \"\""
Sep 30 12:40:50 crc kubenswrapper[5002]: I0930 12:40:50.781364 5002 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/f80ae063-860a-4997-9c9f-57bc3a850e37-server-conf\") on node \"crc\" DevicePath \"\""
Sep 30 12:40:50 crc kubenswrapper[5002]: I0930 12:40:50.781373 5002 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/f80ae063-860a-4997-9c9f-57bc3a850e37-rabbitmq-tls\") on node \"crc\" DevicePath \"\""
12:40:50.781392 5002 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") on node \"crc\" " Sep 30 12:40:50 crc kubenswrapper[5002]: I0930 12:40:50.781401 5002 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/f80ae063-860a-4997-9c9f-57bc3a850e37-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Sep 30 12:40:50 crc kubenswrapper[5002]: I0930 12:40:50.803088 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f80ae063-860a-4997-9c9f-57bc3a850e37-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "f80ae063-860a-4997-9c9f-57bc3a850e37" (UID: "f80ae063-860a-4997-9c9f-57bc3a850e37"). InnerVolumeSpecName "rabbitmq-confd". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 12:40:50 crc kubenswrapper[5002]: I0930 12:40:50.805517 5002 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage01-crc" (UniqueName: "kubernetes.io/local-volume/local-storage01-crc") on node "crc" Sep 30 12:40:50 crc kubenswrapper[5002]: I0930 12:40:50.882717 5002 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/f80ae063-860a-4997-9c9f-57bc3a850e37-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Sep 30 12:40:50 crc kubenswrapper[5002]: I0930 12:40:50.882957 5002 reconciler_common.go:293] "Volume detached for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") on node \"crc\" DevicePath \"\"" Sep 30 12:40:50 crc kubenswrapper[5002]: I0930 12:40:50.997904 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Sep 30 12:40:51 crc kubenswrapper[5002]: I0930 12:40:51.086139 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/26e7b906-c14a-4084-926c-2d2c7ce201be-pod-info\") pod \"26e7b906-c14a-4084-926c-2d2c7ce201be\" (UID: \"26e7b906-c14a-4084-926c-2d2c7ce201be\") " Sep 30 12:40:51 crc kubenswrapper[5002]: I0930 12:40:51.086206 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/26e7b906-c14a-4084-926c-2d2c7ce201be-plugins-conf\") pod \"26e7b906-c14a-4084-926c-2d2c7ce201be\" (UID: \"26e7b906-c14a-4084-926c-2d2c7ce201be\") " Sep 30 12:40:51 crc kubenswrapper[5002]: I0930 12:40:51.086245 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/26e7b906-c14a-4084-926c-2d2c7ce201be-rabbitmq-erlang-cookie\") pod \"26e7b906-c14a-4084-926c-2d2c7ce201be\" (UID: \"26e7b906-c14a-4084-926c-2d2c7ce201be\") " Sep 30 12:40:51 crc kubenswrapper[5002]: I0930 12:40:51.086285 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/26e7b906-c14a-4084-926c-2d2c7ce201be-rabbitmq-tls\") pod \"26e7b906-c14a-4084-926c-2d2c7ce201be\" (UID: \"26e7b906-c14a-4084-926c-2d2c7ce201be\") " Sep 30 12:40:51 crc kubenswrapper[5002]: I0930 12:40:51.086326 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/26e7b906-c14a-4084-926c-2d2c7ce201be-config-data\") pod \"26e7b906-c14a-4084-926c-2d2c7ce201be\" (UID: \"26e7b906-c14a-4084-926c-2d2c7ce201be\") " Sep 30 12:40:51 crc kubenswrapper[5002]: I0930 12:40:51.086389 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cfl92\" (UniqueName: \"kubernetes.io/projected/26e7b906-c14a-4084-926c-2d2c7ce201be-kube-api-access-cfl92\") pod \"26e7b906-c14a-4084-926c-2d2c7ce201be\" (UID: \"26e7b906-c14a-4084-926c-2d2c7ce201be\") " Sep 30 12:40:51 crc kubenswrapper[5002]: I0930 12:40:51.086440 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/26e7b906-c14a-4084-926c-2d2c7ce201be-server-conf\") pod \"26e7b906-c14a-4084-926c-2d2c7ce201be\" (UID: \"26e7b906-c14a-4084-926c-2d2c7ce201be\") " Sep 30 12:40:51 crc kubenswrapper[5002]: I0930 12:40:51.086491 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/26e7b906-c14a-4084-926c-2d2c7ce201be-rabbitmq-confd\") pod \"26e7b906-c14a-4084-926c-2d2c7ce201be\" (UID: \"26e7b906-c14a-4084-926c-2d2c7ce201be\") " Sep 30 12:40:51 crc kubenswrapper[5002]: I0930 12:40:51.086510 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"26e7b906-c14a-4084-926c-2d2c7ce201be\" (UID: \"26e7b906-c14a-4084-926c-2d2c7ce201be\") " Sep 30 12:40:51 crc kubenswrapper[5002]: I0930 12:40:51.086602 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/26e7b906-c14a-4084-926c-2d2c7ce201be-erlang-cookie-secret\") pod \"26e7b906-c14a-4084-926c-2d2c7ce201be\" (UID: 
\"26e7b906-c14a-4084-926c-2d2c7ce201be\") " Sep 30 12:40:51 crc kubenswrapper[5002]: I0930 12:40:51.086632 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/26e7b906-c14a-4084-926c-2d2c7ce201be-rabbitmq-plugins\") pod \"26e7b906-c14a-4084-926c-2d2c7ce201be\" (UID: \"26e7b906-c14a-4084-926c-2d2c7ce201be\") " Sep 30 12:40:51 crc kubenswrapper[5002]: I0930 12:40:51.087513 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/26e7b906-c14a-4084-926c-2d2c7ce201be-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "26e7b906-c14a-4084-926c-2d2c7ce201be" (UID: "26e7b906-c14a-4084-926c-2d2c7ce201be"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 12:40:51 crc kubenswrapper[5002]: I0930 12:40:51.087914 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/26e7b906-c14a-4084-926c-2d2c7ce201be-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "26e7b906-c14a-4084-926c-2d2c7ce201be" (UID: "26e7b906-c14a-4084-926c-2d2c7ce201be"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 12:40:51 crc kubenswrapper[5002]: I0930 12:40:51.088183 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/26e7b906-c14a-4084-926c-2d2c7ce201be-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "26e7b906-c14a-4084-926c-2d2c7ce201be" (UID: "26e7b906-c14a-4084-926c-2d2c7ce201be"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 12:40:51 crc kubenswrapper[5002]: I0930 12:40:51.093228 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage02-crc" (OuterVolumeSpecName: "persistence") pod "26e7b906-c14a-4084-926c-2d2c7ce201be" (UID: "26e7b906-c14a-4084-926c-2d2c7ce201be"). InnerVolumeSpecName "local-storage02-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Sep 30 12:40:51 crc kubenswrapper[5002]: I0930 12:40:51.093731 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/26e7b906-c14a-4084-926c-2d2c7ce201be-kube-api-access-cfl92" (OuterVolumeSpecName: "kube-api-access-cfl92") pod "26e7b906-c14a-4084-926c-2d2c7ce201be" (UID: "26e7b906-c14a-4084-926c-2d2c7ce201be"). InnerVolumeSpecName "kube-api-access-cfl92". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 12:40:51 crc kubenswrapper[5002]: I0930 12:40:51.096819 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/26e7b906-c14a-4084-926c-2d2c7ce201be-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "26e7b906-c14a-4084-926c-2d2c7ce201be" (UID: "26e7b906-c14a-4084-926c-2d2c7ce201be"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:40:51 crc kubenswrapper[5002]: I0930 12:40:51.099994 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/26e7b906-c14a-4084-926c-2d2c7ce201be-pod-info" (OuterVolumeSpecName: "pod-info") pod "26e7b906-c14a-4084-926c-2d2c7ce201be" (UID: "26e7b906-c14a-4084-926c-2d2c7ce201be"). InnerVolumeSpecName "pod-info". 
PluginName "kubernetes.io/downward-api", VolumeGidValue "" Sep 30 12:40:51 crc kubenswrapper[5002]: I0930 12:40:51.102770 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/26e7b906-c14a-4084-926c-2d2c7ce201be-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "26e7b906-c14a-4084-926c-2d2c7ce201be" (UID: "26e7b906-c14a-4084-926c-2d2c7ce201be"). InnerVolumeSpecName "rabbitmq-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 12:40:51 crc kubenswrapper[5002]: I0930 12:40:51.136299 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/26e7b906-c14a-4084-926c-2d2c7ce201be-config-data" (OuterVolumeSpecName: "config-data") pod "26e7b906-c14a-4084-926c-2d2c7ce201be" (UID: "26e7b906-c14a-4084-926c-2d2c7ce201be"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 12:40:51 crc kubenswrapper[5002]: I0930 12:40:51.141832 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/26e7b906-c14a-4084-926c-2d2c7ce201be-server-conf" (OuterVolumeSpecName: "server-conf") pod "26e7b906-c14a-4084-926c-2d2c7ce201be" (UID: "26e7b906-c14a-4084-926c-2d2c7ce201be"). InnerVolumeSpecName "server-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 12:40:51 crc kubenswrapper[5002]: I0930 12:40:51.189018 5002 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/26e7b906-c14a-4084-926c-2d2c7ce201be-rabbitmq-tls\") on node \"crc\" DevicePath \"\"" Sep 30 12:40:51 crc kubenswrapper[5002]: I0930 12:40:51.189058 5002 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/26e7b906-c14a-4084-926c-2d2c7ce201be-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 12:40:51 crc kubenswrapper[5002]: I0930 12:40:51.189070 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cfl92\" (UniqueName: \"kubernetes.io/projected/26e7b906-c14a-4084-926c-2d2c7ce201be-kube-api-access-cfl92\") on node \"crc\" DevicePath \"\"" Sep 30 12:40:51 crc kubenswrapper[5002]: I0930 12:40:51.189083 5002 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/26e7b906-c14a-4084-926c-2d2c7ce201be-server-conf\") on node \"crc\" DevicePath \"\"" Sep 30 12:40:51 crc kubenswrapper[5002]: I0930 12:40:51.189115 5002 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") on node \"crc\" " Sep 30 12:40:51 crc kubenswrapper[5002]: I0930 12:40:51.189128 5002 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/26e7b906-c14a-4084-926c-2d2c7ce201be-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Sep 30 12:40:51 crc kubenswrapper[5002]: I0930 12:40:51.189140 5002 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/26e7b906-c14a-4084-926c-2d2c7ce201be-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Sep 30 12:40:51 crc kubenswrapper[5002]: I0930 12:40:51.189151 5002 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/26e7b906-c14a-4084-926c-2d2c7ce201be-pod-info\") on node \"crc\" DevicePath \"\"" Sep 30 12:40:51 crc kubenswrapper[5002]: I0930 12:40:51.189161 
5002 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/26e7b906-c14a-4084-926c-2d2c7ce201be-plugins-conf\") on node \"crc\" DevicePath \"\"" Sep 30 12:40:51 crc kubenswrapper[5002]: I0930 12:40:51.189174 5002 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/26e7b906-c14a-4084-926c-2d2c7ce201be-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Sep 30 12:40:51 crc kubenswrapper[5002]: I0930 12:40:51.210725 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/26e7b906-c14a-4084-926c-2d2c7ce201be-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "26e7b906-c14a-4084-926c-2d2c7ce201be" (UID: "26e7b906-c14a-4084-926c-2d2c7ce201be"). InnerVolumeSpecName "rabbitmq-confd". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 12:40:51 crc kubenswrapper[5002]: I0930 12:40:51.225398 5002 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage02-crc" (UniqueName: "kubernetes.io/local-volume/local-storage02-crc") on node "crc" Sep 30 12:40:51 crc kubenswrapper[5002]: I0930 12:40:51.285534 5002 generic.go:334] "Generic (PLEG): container finished" podID="26e7b906-c14a-4084-926c-2d2c7ce201be" containerID="103645d4b02ad1c1e03f2de395b434a5b876e573ea9f8681428473bc48a29e93" exitCode=0 Sep 30 12:40:51 crc kubenswrapper[5002]: I0930 12:40:51.285615 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"26e7b906-c14a-4084-926c-2d2c7ce201be","Type":"ContainerDied","Data":"103645d4b02ad1c1e03f2de395b434a5b876e573ea9f8681428473bc48a29e93"} Sep 30 12:40:51 crc kubenswrapper[5002]: I0930 12:40:51.285644 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"26e7b906-c14a-4084-926c-2d2c7ce201be","Type":"ContainerDied","Data":"39fbc7a9975fe4800644205c409f29e430ad3430463dfd9a207d9fad892af902"} Sep 30 12:40:51 crc kubenswrapper[5002]: I0930 12:40:51.285802 5002 scope.go:117] "RemoveContainer" containerID="103645d4b02ad1c1e03f2de395b434a5b876e573ea9f8681428473bc48a29e93" Sep 30 12:40:51 crc kubenswrapper[5002]: I0930 12:40:51.285898 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Sep 30 12:40:51 crc kubenswrapper[5002]: I0930 12:40:51.290225 5002 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/26e7b906-c14a-4084-926c-2d2c7ce201be-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Sep 30 12:40:51 crc kubenswrapper[5002]: I0930 12:40:51.290240 5002 reconciler_common.go:293] "Volume detached for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") on node \"crc\" DevicePath \"\"" Sep 30 12:40:51 crc kubenswrapper[5002]: I0930 12:40:51.291630 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"f80ae063-860a-4997-9c9f-57bc3a850e37","Type":"ContainerDied","Data":"3eaace2f9b0aed40df6a26c28fe5f12235e819b65e60e1d4aa62569ea4beb30a"} Sep 30 12:40:51 crc kubenswrapper[5002]: I0930 12:40:51.291705 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Sep 30 12:40:51 crc kubenswrapper[5002]: I0930 12:40:51.313906 5002 scope.go:117] "RemoveContainer" containerID="572a006f4b69c44220f3b6f964c85c72e58db661b0e2164d4eeeae9deaff60cf" Sep 30 12:40:51 crc kubenswrapper[5002]: I0930 12:40:51.350032 5002 scope.go:117] "RemoveContainer" containerID="103645d4b02ad1c1e03f2de395b434a5b876e573ea9f8681428473bc48a29e93" Sep 30 12:40:51 crc kubenswrapper[5002]: E0930 12:40:51.350643 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"103645d4b02ad1c1e03f2de395b434a5b876e573ea9f8681428473bc48a29e93\": container with ID starting with 103645d4b02ad1c1e03f2de395b434a5b876e573ea9f8681428473bc48a29e93 not found: ID does not exist" containerID="103645d4b02ad1c1e03f2de395b434a5b876e573ea9f8681428473bc48a29e93" Sep 30 12:40:51 crc kubenswrapper[5002]: I0930 12:40:51.350683 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"103645d4b02ad1c1e03f2de395b434a5b876e573ea9f8681428473bc48a29e93"} err="failed to get container status \"103645d4b02ad1c1e03f2de395b434a5b876e573ea9f8681428473bc48a29e93\": rpc error: code = NotFound desc = could not find container \"103645d4b02ad1c1e03f2de395b434a5b876e573ea9f8681428473bc48a29e93\": container with ID starting with 103645d4b02ad1c1e03f2de395b434a5b876e573ea9f8681428473bc48a29e93 not found: ID does not exist" Sep 30 12:40:51 crc kubenswrapper[5002]: I0930 12:40:51.350709 5002 scope.go:117] "RemoveContainer" containerID="572a006f4b69c44220f3b6f964c85c72e58db661b0e2164d4eeeae9deaff60cf" Sep 30 12:40:51 crc kubenswrapper[5002]: E0930 12:40:51.351022 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"572a006f4b69c44220f3b6f964c85c72e58db661b0e2164d4eeeae9deaff60cf\": container with ID starting with 572a006f4b69c44220f3b6f964c85c72e58db661b0e2164d4eeeae9deaff60cf not found: ID does not exist" containerID="572a006f4b69c44220f3b6f964c85c72e58db661b0e2164d4eeeae9deaff60cf" Sep 30 12:40:51 crc kubenswrapper[5002]: I0930 12:40:51.351057 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"572a006f4b69c44220f3b6f964c85c72e58db661b0e2164d4eeeae9deaff60cf"} err="failed to get container status \"572a006f4b69c44220f3b6f964c85c72e58db661b0e2164d4eeeae9deaff60cf\": rpc error: code = NotFound desc = could not find container \"572a006f4b69c44220f3b6f964c85c72e58db661b0e2164d4eeeae9deaff60cf\": container with ID starting with 572a006f4b69c44220f3b6f964c85c72e58db661b0e2164d4eeeae9deaff60cf not found: ID does not exist" Sep 30 12:40:51 crc kubenswrapper[5002]: I0930 12:40:51.351078 5002 scope.go:117] "RemoveContainer" containerID="3ddabafa91a278a090d2fcd07f8418fee0a2ef9a8b6d78e23fa0832bc69d145c" Sep 30 12:40:51 crc kubenswrapper[5002]: I0930 12:40:51.354713 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Sep 30 12:40:51 crc kubenswrapper[5002]: I0930 12:40:51.372287 5002 scope.go:117] "RemoveContainer" containerID="71b9c072672276657eaef2a61497bf03d34a5781bcf2885cdb55475c104f5798" Sep 30 12:40:51 crc kubenswrapper[5002]: I0930 12:40:51.373197 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Sep 30 12:40:51 crc kubenswrapper[5002]: I0930 12:40:51.389520 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"] Sep 30 
12:40:51 crc kubenswrapper[5002]: I0930 12:40:51.399604 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-server-0"] Sep 30 12:40:51 crc kubenswrapper[5002]: I0930 12:40:51.406670 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Sep 30 12:40:51 crc kubenswrapper[5002]: E0930 12:40:51.407046 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f80ae063-860a-4997-9c9f-57bc3a850e37" containerName="rabbitmq" Sep 30 12:40:51 crc kubenswrapper[5002]: I0930 12:40:51.407062 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="f80ae063-860a-4997-9c9f-57bc3a850e37" containerName="rabbitmq" Sep 30 12:40:51 crc kubenswrapper[5002]: E0930 12:40:51.407090 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="26e7b906-c14a-4084-926c-2d2c7ce201be" containerName="setup-container" Sep 30 12:40:51 crc kubenswrapper[5002]: I0930 12:40:51.407097 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="26e7b906-c14a-4084-926c-2d2c7ce201be" containerName="setup-container" Sep 30 12:40:51 crc kubenswrapper[5002]: E0930 12:40:51.407118 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="26e7b906-c14a-4084-926c-2d2c7ce201be" containerName="rabbitmq" Sep 30 12:40:51 crc kubenswrapper[5002]: I0930 12:40:51.407123 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="26e7b906-c14a-4084-926c-2d2c7ce201be" containerName="rabbitmq" Sep 30 12:40:51 crc kubenswrapper[5002]: E0930 12:40:51.407134 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f80ae063-860a-4997-9c9f-57bc3a850e37" containerName="setup-container" Sep 30 12:40:51 crc kubenswrapper[5002]: I0930 12:40:51.407141 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="f80ae063-860a-4997-9c9f-57bc3a850e37" containerName="setup-container" Sep 30 12:40:51 crc kubenswrapper[5002]: I0930 12:40:51.407344 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="f80ae063-860a-4997-9c9f-57bc3a850e37" containerName="rabbitmq" Sep 30 12:40:51 crc kubenswrapper[5002]: I0930 12:40:51.407373 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="26e7b906-c14a-4084-926c-2d2c7ce201be" containerName="rabbitmq" Sep 30 12:40:51 crc kubenswrapper[5002]: I0930 12:40:51.408453 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Sep 30 12:40:51 crc kubenswrapper[5002]: I0930 12:40:51.411721 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-server-conf" Sep 30 12:40:51 crc kubenswrapper[5002]: I0930 12:40:51.411926 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-erlang-cookie" Sep 30 12:40:51 crc kubenswrapper[5002]: I0930 12:40:51.412078 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-default-user" Sep 30 12:40:51 crc kubenswrapper[5002]: I0930 12:40:51.412250 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-plugins-conf" Sep 30 12:40:51 crc kubenswrapper[5002]: I0930 12:40:51.412408 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-server-dockercfg-s489v" Sep 30 12:40:51 crc kubenswrapper[5002]: I0930 12:40:51.412599 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-config-data" Sep 30 12:40:51 crc kubenswrapper[5002]: I0930 12:40:51.412753 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-cell1-svc" Sep 30 12:40:51 crc kubenswrapper[5002]: I0930 12:40:51.413512 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-0"] Sep 30 12:40:51 crc kubenswrapper[5002]: I0930 12:40:51.415057 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Sep 30 12:40:51 crc kubenswrapper[5002]: I0930 12:40:51.417178 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf" Sep 30 12:40:51 crc kubenswrapper[5002]: I0930 12:40:51.417348 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user" Sep 30 12:40:51 crc kubenswrapper[5002]: I0930 12:40:51.417449 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie" Sep 30 12:40:51 crc kubenswrapper[5002]: I0930 12:40:51.417576 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-svc" Sep 30 12:40:51 crc kubenswrapper[5002]: I0930 12:40:51.417647 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-config-data" Sep 30 12:40:51 crc kubenswrapper[5002]: I0930 12:40:51.417738 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-sjczl" Sep 30 12:40:51 crc kubenswrapper[5002]: I0930 12:40:51.417878 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf" Sep 30 12:40:51 crc kubenswrapper[5002]: I0930 12:40:51.426463 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Sep 30 12:40:51 crc kubenswrapper[5002]: I0930 12:40:51.435351 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Sep 30 12:40:51 crc kubenswrapper[5002]: I0930 12:40:51.493596 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/4069e130-8a6c-4bf6-9885-b8e35857e519-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"4069e130-8a6c-4bf6-9885-b8e35857e519\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 12:40:51 crc kubenswrapper[5002]: I0930 
12:40:51.493658 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"rabbitmq-server-0\" (UID: \"9f3cd025-3ba6-453b-9224-ee63cf57890c\") " pod="openstack/rabbitmq-server-0" Sep 30 12:40:51 crc kubenswrapper[5002]: I0930 12:40:51.493698 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ppc2f\" (UniqueName: \"kubernetes.io/projected/4069e130-8a6c-4bf6-9885-b8e35857e519-kube-api-access-ppc2f\") pod \"rabbitmq-cell1-server-0\" (UID: \"4069e130-8a6c-4bf6-9885-b8e35857e519\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 12:40:51 crc kubenswrapper[5002]: I0930 12:40:51.493728 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/4069e130-8a6c-4bf6-9885-b8e35857e519-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"4069e130-8a6c-4bf6-9885-b8e35857e519\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 12:40:51 crc kubenswrapper[5002]: I0930 12:40:51.493760 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/9f3cd025-3ba6-453b-9224-ee63cf57890c-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"9f3cd025-3ba6-453b-9224-ee63cf57890c\") " pod="openstack/rabbitmq-server-0" Sep 30 12:40:51 crc kubenswrapper[5002]: I0930 12:40:51.493876 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/9f3cd025-3ba6-453b-9224-ee63cf57890c-pod-info\") pod \"rabbitmq-server-0\" (UID: \"9f3cd025-3ba6-453b-9224-ee63cf57890c\") " pod="openstack/rabbitmq-server-0" Sep 30 12:40:51 crc kubenswrapper[5002]: I0930 12:40:51.493961 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/4069e130-8a6c-4bf6-9885-b8e35857e519-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"4069e130-8a6c-4bf6-9885-b8e35857e519\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 12:40:51 crc kubenswrapper[5002]: I0930 12:40:51.494015 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/9f3cd025-3ba6-453b-9224-ee63cf57890c-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"9f3cd025-3ba6-453b-9224-ee63cf57890c\") " pod="openstack/rabbitmq-server-0" Sep 30 12:40:51 crc kubenswrapper[5002]: I0930 12:40:51.494049 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/4069e130-8a6c-4bf6-9885-b8e35857e519-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"4069e130-8a6c-4bf6-9885-b8e35857e519\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 12:40:51 crc kubenswrapper[5002]: I0930 12:40:51.494079 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/9f3cd025-3ba6-453b-9224-ee63cf57890c-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"9f3cd025-3ba6-453b-9224-ee63cf57890c\") " pod="openstack/rabbitmq-server-0" Sep 30 12:40:51 crc kubenswrapper[5002]: I0930 12:40:51.494142 5002 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/9f3cd025-3ba6-453b-9224-ee63cf57890c-config-data\") pod \"rabbitmq-server-0\" (UID: \"9f3cd025-3ba6-453b-9224-ee63cf57890c\") " pod="openstack/rabbitmq-server-0" Sep 30 12:40:51 crc kubenswrapper[5002]: I0930 12:40:51.494198 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/9f3cd025-3ba6-453b-9224-ee63cf57890c-server-conf\") pod \"rabbitmq-server-0\" (UID: \"9f3cd025-3ba6-453b-9224-ee63cf57890c\") " pod="openstack/rabbitmq-server-0" Sep 30 12:40:51 crc kubenswrapper[5002]: I0930 12:40:51.494315 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"4069e130-8a6c-4bf6-9885-b8e35857e519\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 12:40:51 crc kubenswrapper[5002]: I0930 12:40:51.494356 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/4069e130-8a6c-4bf6-9885-b8e35857e519-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"4069e130-8a6c-4bf6-9885-b8e35857e519\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 12:40:51 crc kubenswrapper[5002]: I0930 12:40:51.494414 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6qhwv\" (UniqueName: \"kubernetes.io/projected/9f3cd025-3ba6-453b-9224-ee63cf57890c-kube-api-access-6qhwv\") pod \"rabbitmq-server-0\" (UID: \"9f3cd025-3ba6-453b-9224-ee63cf57890c\") " pod="openstack/rabbitmq-server-0" Sep 30 12:40:51 crc kubenswrapper[5002]: I0930 12:40:51.494446 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/9f3cd025-3ba6-453b-9224-ee63cf57890c-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"9f3cd025-3ba6-453b-9224-ee63cf57890c\") " pod="openstack/rabbitmq-server-0" Sep 30 12:40:51 crc kubenswrapper[5002]: I0930 12:40:51.494487 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/4069e130-8a6c-4bf6-9885-b8e35857e519-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"4069e130-8a6c-4bf6-9885-b8e35857e519\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 12:40:51 crc kubenswrapper[5002]: I0930 12:40:51.494536 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/9f3cd025-3ba6-453b-9224-ee63cf57890c-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"9f3cd025-3ba6-453b-9224-ee63cf57890c\") " pod="openstack/rabbitmq-server-0" Sep 30 12:40:51 crc kubenswrapper[5002]: I0930 12:40:51.494708 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/4069e130-8a6c-4bf6-9885-b8e35857e519-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"4069e130-8a6c-4bf6-9885-b8e35857e519\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 12:40:51 crc kubenswrapper[5002]: I0930 12:40:51.494746 5002 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/4069e130-8a6c-4bf6-9885-b8e35857e519-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"4069e130-8a6c-4bf6-9885-b8e35857e519\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 12:40:51 crc kubenswrapper[5002]: I0930 12:40:51.494771 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/4069e130-8a6c-4bf6-9885-b8e35857e519-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"4069e130-8a6c-4bf6-9885-b8e35857e519\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 12:40:51 crc kubenswrapper[5002]: I0930 12:40:51.494863 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/9f3cd025-3ba6-453b-9224-ee63cf57890c-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"9f3cd025-3ba6-453b-9224-ee63cf57890c\") " pod="openstack/rabbitmq-server-0" Sep 30 12:40:51 crc kubenswrapper[5002]: I0930 12:40:51.596735 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/4069e130-8a6c-4bf6-9885-b8e35857e519-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"4069e130-8a6c-4bf6-9885-b8e35857e519\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 12:40:51 crc kubenswrapper[5002]: I0930 12:40:51.597371 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"rabbitmq-server-0\" (UID: \"9f3cd025-3ba6-453b-9224-ee63cf57890c\") " pod="openstack/rabbitmq-server-0" Sep 30 12:40:51 crc kubenswrapper[5002]: I0930 12:40:51.597426 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ppc2f\" (UniqueName: \"kubernetes.io/projected/4069e130-8a6c-4bf6-9885-b8e35857e519-kube-api-access-ppc2f\") pod \"rabbitmq-cell1-server-0\" (UID: \"4069e130-8a6c-4bf6-9885-b8e35857e519\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 12:40:51 crc kubenswrapper[5002]: I0930 12:40:51.597458 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/4069e130-8a6c-4bf6-9885-b8e35857e519-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"4069e130-8a6c-4bf6-9885-b8e35857e519\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 12:40:51 crc kubenswrapper[5002]: I0930 12:40:51.597512 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/9f3cd025-3ba6-453b-9224-ee63cf57890c-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"9f3cd025-3ba6-453b-9224-ee63cf57890c\") " pod="openstack/rabbitmq-server-0" Sep 30 12:40:51 crc kubenswrapper[5002]: I0930 12:40:51.597558 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/9f3cd025-3ba6-453b-9224-ee63cf57890c-pod-info\") pod \"rabbitmq-server-0\" (UID: \"9f3cd025-3ba6-453b-9224-ee63cf57890c\") " pod="openstack/rabbitmq-server-0" Sep 30 12:40:51 crc kubenswrapper[5002]: I0930 12:40:51.597605 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: 
\"kubernetes.io/configmap/4069e130-8a6c-4bf6-9885-b8e35857e519-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"4069e130-8a6c-4bf6-9885-b8e35857e519\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 12:40:51 crc kubenswrapper[5002]: I0930 12:40:51.597640 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/9f3cd025-3ba6-453b-9224-ee63cf57890c-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"9f3cd025-3ba6-453b-9224-ee63cf57890c\") " pod="openstack/rabbitmq-server-0" Sep 30 12:40:51 crc kubenswrapper[5002]: I0930 12:40:51.597635 5002 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"rabbitmq-server-0\" (UID: \"9f3cd025-3ba6-453b-9224-ee63cf57890c\") device mount path \"/mnt/openstack/pv01\"" pod="openstack/rabbitmq-server-0" Sep 30 12:40:51 crc kubenswrapper[5002]: I0930 12:40:51.597688 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/4069e130-8a6c-4bf6-9885-b8e35857e519-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"4069e130-8a6c-4bf6-9885-b8e35857e519\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 12:40:51 crc kubenswrapper[5002]: I0930 12:40:51.597720 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/9f3cd025-3ba6-453b-9224-ee63cf57890c-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"9f3cd025-3ba6-453b-9224-ee63cf57890c\") " pod="openstack/rabbitmq-server-0" Sep 30 12:40:51 crc kubenswrapper[5002]: I0930 12:40:51.597741 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/9f3cd025-3ba6-453b-9224-ee63cf57890c-config-data\") pod \"rabbitmq-server-0\" (UID: \"9f3cd025-3ba6-453b-9224-ee63cf57890c\") " pod="openstack/rabbitmq-server-0" Sep 30 12:40:51 crc kubenswrapper[5002]: I0930 12:40:51.597762 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/9f3cd025-3ba6-453b-9224-ee63cf57890c-server-conf\") pod \"rabbitmq-server-0\" (UID: \"9f3cd025-3ba6-453b-9224-ee63cf57890c\") " pod="openstack/rabbitmq-server-0" Sep 30 12:40:51 crc kubenswrapper[5002]: I0930 12:40:51.597803 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"4069e130-8a6c-4bf6-9885-b8e35857e519\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 12:40:51 crc kubenswrapper[5002]: I0930 12:40:51.597827 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/4069e130-8a6c-4bf6-9885-b8e35857e519-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"4069e130-8a6c-4bf6-9885-b8e35857e519\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 12:40:51 crc kubenswrapper[5002]: I0930 12:40:51.597866 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6qhwv\" (UniqueName: \"kubernetes.io/projected/9f3cd025-3ba6-453b-9224-ee63cf57890c-kube-api-access-6qhwv\") pod \"rabbitmq-server-0\" (UID: \"9f3cd025-3ba6-453b-9224-ee63cf57890c\") " pod="openstack/rabbitmq-server-0" Sep 30 
12:40:51 crc kubenswrapper[5002]: I0930 12:40:51.597895 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/9f3cd025-3ba6-453b-9224-ee63cf57890c-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"9f3cd025-3ba6-453b-9224-ee63cf57890c\") " pod="openstack/rabbitmq-server-0" Sep 30 12:40:51 crc kubenswrapper[5002]: I0930 12:40:51.597918 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/4069e130-8a6c-4bf6-9885-b8e35857e519-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"4069e130-8a6c-4bf6-9885-b8e35857e519\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 12:40:51 crc kubenswrapper[5002]: I0930 12:40:51.597940 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/9f3cd025-3ba6-453b-9224-ee63cf57890c-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"9f3cd025-3ba6-453b-9224-ee63cf57890c\") " pod="openstack/rabbitmq-server-0" Sep 30 12:40:51 crc kubenswrapper[5002]: I0930 12:40:51.597994 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/4069e130-8a6c-4bf6-9885-b8e35857e519-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"4069e130-8a6c-4bf6-9885-b8e35857e519\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 12:40:51 crc kubenswrapper[5002]: I0930 12:40:51.598024 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/4069e130-8a6c-4bf6-9885-b8e35857e519-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"4069e130-8a6c-4bf6-9885-b8e35857e519\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 12:40:51 crc kubenswrapper[5002]: I0930 12:40:51.598046 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/4069e130-8a6c-4bf6-9885-b8e35857e519-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"4069e130-8a6c-4bf6-9885-b8e35857e519\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 12:40:51 crc kubenswrapper[5002]: I0930 12:40:51.598072 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/9f3cd025-3ba6-453b-9224-ee63cf57890c-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"9f3cd025-3ba6-453b-9224-ee63cf57890c\") " pod="openstack/rabbitmq-server-0" Sep 30 12:40:51 crc kubenswrapper[5002]: I0930 12:40:51.598309 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/9f3cd025-3ba6-453b-9224-ee63cf57890c-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"9f3cd025-3ba6-453b-9224-ee63cf57890c\") " pod="openstack/rabbitmq-server-0" Sep 30 12:40:51 crc kubenswrapper[5002]: I0930 12:40:51.598723 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/9f3cd025-3ba6-453b-9224-ee63cf57890c-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"9f3cd025-3ba6-453b-9224-ee63cf57890c\") " pod="openstack/rabbitmq-server-0" Sep 30 12:40:51 crc kubenswrapper[5002]: I0930 12:40:51.598767 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: 
\"kubernetes.io/empty-dir/4069e130-8a6c-4bf6-9885-b8e35857e519-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"4069e130-8a6c-4bf6-9885-b8e35857e519\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 12:40:51 crc kubenswrapper[5002]: I0930 12:40:51.598894 5002 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"4069e130-8a6c-4bf6-9885-b8e35857e519\") device mount path \"/mnt/openstack/pv02\"" pod="openstack/rabbitmq-cell1-server-0" Sep 30 12:40:51 crc kubenswrapper[5002]: I0930 12:40:51.599618 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/9f3cd025-3ba6-453b-9224-ee63cf57890c-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"9f3cd025-3ba6-453b-9224-ee63cf57890c\") " pod="openstack/rabbitmq-server-0" Sep 30 12:40:51 crc kubenswrapper[5002]: I0930 12:40:51.599838 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/4069e130-8a6c-4bf6-9885-b8e35857e519-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"4069e130-8a6c-4bf6-9885-b8e35857e519\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 12:40:51 crc kubenswrapper[5002]: I0930 12:40:51.600031 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/4069e130-8a6c-4bf6-9885-b8e35857e519-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"4069e130-8a6c-4bf6-9885-b8e35857e519\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 12:40:51 crc kubenswrapper[5002]: I0930 12:40:51.600415 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/4069e130-8a6c-4bf6-9885-b8e35857e519-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"4069e130-8a6c-4bf6-9885-b8e35857e519\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 12:40:51 crc kubenswrapper[5002]: I0930 12:40:51.600870 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/4069e130-8a6c-4bf6-9885-b8e35857e519-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"4069e130-8a6c-4bf6-9885-b8e35857e519\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 12:40:51 crc kubenswrapper[5002]: I0930 12:40:51.601881 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/9f3cd025-3ba6-453b-9224-ee63cf57890c-server-conf\") pod \"rabbitmq-server-0\" (UID: \"9f3cd025-3ba6-453b-9224-ee63cf57890c\") " pod="openstack/rabbitmq-server-0" Sep 30 12:40:51 crc kubenswrapper[5002]: I0930 12:40:51.601985 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/9f3cd025-3ba6-453b-9224-ee63cf57890c-config-data\") pod \"rabbitmq-server-0\" (UID: \"9f3cd025-3ba6-453b-9224-ee63cf57890c\") " pod="openstack/rabbitmq-server-0" Sep 30 12:40:51 crc kubenswrapper[5002]: I0930 12:40:51.602135 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/9f3cd025-3ba6-453b-9224-ee63cf57890c-pod-info\") pod \"rabbitmq-server-0\" (UID: \"9f3cd025-3ba6-453b-9224-ee63cf57890c\") " pod="openstack/rabbitmq-server-0" Sep 30 12:40:51 crc kubenswrapper[5002]: I0930 
12:40:51.608063 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/4069e130-8a6c-4bf6-9885-b8e35857e519-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"4069e130-8a6c-4bf6-9885-b8e35857e519\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 12:40:51 crc kubenswrapper[5002]: I0930 12:40:51.608510 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/9f3cd025-3ba6-453b-9224-ee63cf57890c-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"9f3cd025-3ba6-453b-9224-ee63cf57890c\") " pod="openstack/rabbitmq-server-0" Sep 30 12:40:51 crc kubenswrapper[5002]: I0930 12:40:51.610462 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/4069e130-8a6c-4bf6-9885-b8e35857e519-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"4069e130-8a6c-4bf6-9885-b8e35857e519\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 12:40:51 crc kubenswrapper[5002]: I0930 12:40:51.611553 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/4069e130-8a6c-4bf6-9885-b8e35857e519-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"4069e130-8a6c-4bf6-9885-b8e35857e519\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 12:40:51 crc kubenswrapper[5002]: I0930 12:40:51.619148 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/9f3cd025-3ba6-453b-9224-ee63cf57890c-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"9f3cd025-3ba6-453b-9224-ee63cf57890c\") " pod="openstack/rabbitmq-server-0" Sep 30 12:40:51 crc kubenswrapper[5002]: I0930 12:40:51.626039 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/9f3cd025-3ba6-453b-9224-ee63cf57890c-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"9f3cd025-3ba6-453b-9224-ee63cf57890c\") " pod="openstack/rabbitmq-server-0" Sep 30 12:40:51 crc kubenswrapper[5002]: I0930 12:40:51.626814 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/4069e130-8a6c-4bf6-9885-b8e35857e519-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"4069e130-8a6c-4bf6-9885-b8e35857e519\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 12:40:51 crc kubenswrapper[5002]: I0930 12:40:51.628580 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6qhwv\" (UniqueName: \"kubernetes.io/projected/9f3cd025-3ba6-453b-9224-ee63cf57890c-kube-api-access-6qhwv\") pod \"rabbitmq-server-0\" (UID: \"9f3cd025-3ba6-453b-9224-ee63cf57890c\") " pod="openstack/rabbitmq-server-0" Sep 30 12:40:51 crc kubenswrapper[5002]: I0930 12:40:51.633405 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ppc2f\" (UniqueName: \"kubernetes.io/projected/4069e130-8a6c-4bf6-9885-b8e35857e519-kube-api-access-ppc2f\") pod \"rabbitmq-cell1-server-0\" (UID: \"4069e130-8a6c-4bf6-9885-b8e35857e519\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 12:40:51 crc kubenswrapper[5002]: I0930 12:40:51.648748 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"rabbitmq-server-0\" (UID: 
\"9f3cd025-3ba6-453b-9224-ee63cf57890c\") " pod="openstack/rabbitmq-server-0" Sep 30 12:40:51 crc kubenswrapper[5002]: I0930 12:40:51.659073 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"4069e130-8a6c-4bf6-9885-b8e35857e519\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 12:40:51 crc kubenswrapper[5002]: I0930 12:40:51.726357 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Sep 30 12:40:51 crc kubenswrapper[5002]: I0930 12:40:51.739031 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Sep 30 12:40:52 crc kubenswrapper[5002]: I0930 12:40:52.193756 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Sep 30 12:40:52 crc kubenswrapper[5002]: W0930 12:40:52.197623 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4069e130_8a6c_4bf6_9885_b8e35857e519.slice/crio-61c907f9e45b3038a677788f2e8be1b699a0586989e47f6ecaf594f4d9edcae4 WatchSource:0}: Error finding container 61c907f9e45b3038a677788f2e8be1b699a0586989e47f6ecaf594f4d9edcae4: Status 404 returned error can't find the container with id 61c907f9e45b3038a677788f2e8be1b699a0586989e47f6ecaf594f4d9edcae4 Sep 30 12:40:52 crc kubenswrapper[5002]: I0930 12:40:52.301009 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Sep 30 12:40:52 crc kubenswrapper[5002]: I0930 12:40:52.309260 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"4069e130-8a6c-4bf6-9885-b8e35857e519","Type":"ContainerStarted","Data":"61c907f9e45b3038a677788f2e8be1b699a0586989e47f6ecaf594f4d9edcae4"} Sep 30 12:40:52 crc kubenswrapper[5002]: I0930 12:40:52.692178 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="26e7b906-c14a-4084-926c-2d2c7ce201be" path="/var/lib/kubelet/pods/26e7b906-c14a-4084-926c-2d2c7ce201be/volumes" Sep 30 12:40:52 crc kubenswrapper[5002]: I0930 12:40:52.694923 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f80ae063-860a-4997-9c9f-57bc3a850e37" path="/var/lib/kubelet/pods/f80ae063-860a-4997-9c9f-57bc3a850e37/volumes" Sep 30 12:40:53 crc kubenswrapper[5002]: I0930 12:40:53.314505 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-67b789f86c-mg4dg"] Sep 30 12:40:53 crc kubenswrapper[5002]: I0930 12:40:53.316310 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-67b789f86c-mg4dg" Sep 30 12:40:53 crc kubenswrapper[5002]: I0930 12:40:53.319871 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-edpm-ipam" Sep 30 12:40:53 crc kubenswrapper[5002]: I0930 12:40:53.324532 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"9f3cd025-3ba6-453b-9224-ee63cf57890c","Type":"ContainerStarted","Data":"71683fa88a8d46889041e673a9d26be32e82426ac6b2937fcc539eb48193ccf8"} Sep 30 12:40:53 crc kubenswrapper[5002]: I0930 12:40:53.324590 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"9f3cd025-3ba6-453b-9224-ee63cf57890c","Type":"ContainerStarted","Data":"34265abe71fac91fb1a183610c6b4cb2d39c0451a8a015fe31c5bd6b89be6061"} Sep 30 12:40:53 crc kubenswrapper[5002]: I0930 12:40:53.328123 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"4069e130-8a6c-4bf6-9885-b8e35857e519","Type":"ContainerStarted","Data":"9a658de55532ad0b8b15e976f47aac898695a74d94c2c28c79bb7e37af3738c4"} Sep 30 12:40:53 crc kubenswrapper[5002]: I0930 12:40:53.334686 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-67b789f86c-mg4dg"] Sep 30 12:40:53 crc kubenswrapper[5002]: I0930 12:40:53.436135 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/fcb60551-c82d-4912-8864-aa8c1b3ae7ca-openstack-edpm-ipam\") pod \"dnsmasq-dns-67b789f86c-mg4dg\" (UID: \"fcb60551-c82d-4912-8864-aa8c1b3ae7ca\") " pod="openstack/dnsmasq-dns-67b789f86c-mg4dg" Sep 30 12:40:53 crc kubenswrapper[5002]: I0930 12:40:53.436254 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jgc4s\" (UniqueName: \"kubernetes.io/projected/fcb60551-c82d-4912-8864-aa8c1b3ae7ca-kube-api-access-jgc4s\") pod \"dnsmasq-dns-67b789f86c-mg4dg\" (UID: \"fcb60551-c82d-4912-8864-aa8c1b3ae7ca\") " pod="openstack/dnsmasq-dns-67b789f86c-mg4dg" Sep 30 12:40:53 crc kubenswrapper[5002]: I0930 12:40:53.436707 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/fcb60551-c82d-4912-8864-aa8c1b3ae7ca-ovsdbserver-sb\") pod \"dnsmasq-dns-67b789f86c-mg4dg\" (UID: \"fcb60551-c82d-4912-8864-aa8c1b3ae7ca\") " pod="openstack/dnsmasq-dns-67b789f86c-mg4dg" Sep 30 12:40:53 crc kubenswrapper[5002]: I0930 12:40:53.436787 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/fcb60551-c82d-4912-8864-aa8c1b3ae7ca-ovsdbserver-nb\") pod \"dnsmasq-dns-67b789f86c-mg4dg\" (UID: \"fcb60551-c82d-4912-8864-aa8c1b3ae7ca\") " pod="openstack/dnsmasq-dns-67b789f86c-mg4dg" Sep 30 12:40:53 crc kubenswrapper[5002]: I0930 12:40:53.437090 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/fcb60551-c82d-4912-8864-aa8c1b3ae7ca-dns-svc\") pod \"dnsmasq-dns-67b789f86c-mg4dg\" (UID: \"fcb60551-c82d-4912-8864-aa8c1b3ae7ca\") " pod="openstack/dnsmasq-dns-67b789f86c-mg4dg" Sep 30 12:40:53 crc kubenswrapper[5002]: I0930 12:40:53.437166 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" 
(UniqueName: \"kubernetes.io/configmap/fcb60551-c82d-4912-8864-aa8c1b3ae7ca-config\") pod \"dnsmasq-dns-67b789f86c-mg4dg\" (UID: \"fcb60551-c82d-4912-8864-aa8c1b3ae7ca\") " pod="openstack/dnsmasq-dns-67b789f86c-mg4dg" Sep 30 12:40:53 crc kubenswrapper[5002]: I0930 12:40:53.437190 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/fcb60551-c82d-4912-8864-aa8c1b3ae7ca-dns-swift-storage-0\") pod \"dnsmasq-dns-67b789f86c-mg4dg\" (UID: \"fcb60551-c82d-4912-8864-aa8c1b3ae7ca\") " pod="openstack/dnsmasq-dns-67b789f86c-mg4dg" Sep 30 12:40:53 crc kubenswrapper[5002]: I0930 12:40:53.538571 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/fcb60551-c82d-4912-8864-aa8c1b3ae7ca-openstack-edpm-ipam\") pod \"dnsmasq-dns-67b789f86c-mg4dg\" (UID: \"fcb60551-c82d-4912-8864-aa8c1b3ae7ca\") " pod="openstack/dnsmasq-dns-67b789f86c-mg4dg" Sep 30 12:40:53 crc kubenswrapper[5002]: I0930 12:40:53.538625 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jgc4s\" (UniqueName: \"kubernetes.io/projected/fcb60551-c82d-4912-8864-aa8c1b3ae7ca-kube-api-access-jgc4s\") pod \"dnsmasq-dns-67b789f86c-mg4dg\" (UID: \"fcb60551-c82d-4912-8864-aa8c1b3ae7ca\") " pod="openstack/dnsmasq-dns-67b789f86c-mg4dg" Sep 30 12:40:53 crc kubenswrapper[5002]: I0930 12:40:53.538667 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/fcb60551-c82d-4912-8864-aa8c1b3ae7ca-ovsdbserver-sb\") pod \"dnsmasq-dns-67b789f86c-mg4dg\" (UID: \"fcb60551-c82d-4912-8864-aa8c1b3ae7ca\") " pod="openstack/dnsmasq-dns-67b789f86c-mg4dg" Sep 30 12:40:53 crc kubenswrapper[5002]: I0930 12:40:53.538693 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/fcb60551-c82d-4912-8864-aa8c1b3ae7ca-ovsdbserver-nb\") pod \"dnsmasq-dns-67b789f86c-mg4dg\" (UID: \"fcb60551-c82d-4912-8864-aa8c1b3ae7ca\") " pod="openstack/dnsmasq-dns-67b789f86c-mg4dg" Sep 30 12:40:53 crc kubenswrapper[5002]: I0930 12:40:53.538720 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/fcb60551-c82d-4912-8864-aa8c1b3ae7ca-dns-svc\") pod \"dnsmasq-dns-67b789f86c-mg4dg\" (UID: \"fcb60551-c82d-4912-8864-aa8c1b3ae7ca\") " pod="openstack/dnsmasq-dns-67b789f86c-mg4dg" Sep 30 12:40:53 crc kubenswrapper[5002]: I0930 12:40:53.538754 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fcb60551-c82d-4912-8864-aa8c1b3ae7ca-config\") pod \"dnsmasq-dns-67b789f86c-mg4dg\" (UID: \"fcb60551-c82d-4912-8864-aa8c1b3ae7ca\") " pod="openstack/dnsmasq-dns-67b789f86c-mg4dg" Sep 30 12:40:53 crc kubenswrapper[5002]: I0930 12:40:53.538773 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/fcb60551-c82d-4912-8864-aa8c1b3ae7ca-dns-swift-storage-0\") pod \"dnsmasq-dns-67b789f86c-mg4dg\" (UID: \"fcb60551-c82d-4912-8864-aa8c1b3ae7ca\") " pod="openstack/dnsmasq-dns-67b789f86c-mg4dg" Sep 30 12:40:53 crc kubenswrapper[5002]: I0930 12:40:53.540010 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam\" (UniqueName: 
\"kubernetes.io/configmap/fcb60551-c82d-4912-8864-aa8c1b3ae7ca-openstack-edpm-ipam\") pod \"dnsmasq-dns-67b789f86c-mg4dg\" (UID: \"fcb60551-c82d-4912-8864-aa8c1b3ae7ca\") " pod="openstack/dnsmasq-dns-67b789f86c-mg4dg" Sep 30 12:40:53 crc kubenswrapper[5002]: I0930 12:40:53.540198 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/fcb60551-c82d-4912-8864-aa8c1b3ae7ca-dns-svc\") pod \"dnsmasq-dns-67b789f86c-mg4dg\" (UID: \"fcb60551-c82d-4912-8864-aa8c1b3ae7ca\") " pod="openstack/dnsmasq-dns-67b789f86c-mg4dg" Sep 30 12:40:53 crc kubenswrapper[5002]: I0930 12:40:53.540222 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/fcb60551-c82d-4912-8864-aa8c1b3ae7ca-ovsdbserver-nb\") pod \"dnsmasq-dns-67b789f86c-mg4dg\" (UID: \"fcb60551-c82d-4912-8864-aa8c1b3ae7ca\") " pod="openstack/dnsmasq-dns-67b789f86c-mg4dg" Sep 30 12:40:53 crc kubenswrapper[5002]: I0930 12:40:53.540276 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fcb60551-c82d-4912-8864-aa8c1b3ae7ca-config\") pod \"dnsmasq-dns-67b789f86c-mg4dg\" (UID: \"fcb60551-c82d-4912-8864-aa8c1b3ae7ca\") " pod="openstack/dnsmasq-dns-67b789f86c-mg4dg" Sep 30 12:40:53 crc kubenswrapper[5002]: I0930 12:40:53.540416 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/fcb60551-c82d-4912-8864-aa8c1b3ae7ca-dns-swift-storage-0\") pod \"dnsmasq-dns-67b789f86c-mg4dg\" (UID: \"fcb60551-c82d-4912-8864-aa8c1b3ae7ca\") " pod="openstack/dnsmasq-dns-67b789f86c-mg4dg" Sep 30 12:40:53 crc kubenswrapper[5002]: I0930 12:40:53.540533 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/fcb60551-c82d-4912-8864-aa8c1b3ae7ca-ovsdbserver-sb\") pod \"dnsmasq-dns-67b789f86c-mg4dg\" (UID: \"fcb60551-c82d-4912-8864-aa8c1b3ae7ca\") " pod="openstack/dnsmasq-dns-67b789f86c-mg4dg" Sep 30 12:40:53 crc kubenswrapper[5002]: I0930 12:40:53.563250 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jgc4s\" (UniqueName: \"kubernetes.io/projected/fcb60551-c82d-4912-8864-aa8c1b3ae7ca-kube-api-access-jgc4s\") pod \"dnsmasq-dns-67b789f86c-mg4dg\" (UID: \"fcb60551-c82d-4912-8864-aa8c1b3ae7ca\") " pod="openstack/dnsmasq-dns-67b789f86c-mg4dg" Sep 30 12:40:53 crc kubenswrapper[5002]: I0930 12:40:53.638216 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-67b789f86c-mg4dg" Sep 30 12:40:54 crc kubenswrapper[5002]: I0930 12:40:54.076600 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-67b789f86c-mg4dg"] Sep 30 12:40:54 crc kubenswrapper[5002]: W0930 12:40:54.081073 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podfcb60551_c82d_4912_8864_aa8c1b3ae7ca.slice/crio-3266e1996f714e410bf18f8e65416db8760a7b5d258a2e957da405683600042e WatchSource:0}: Error finding container 3266e1996f714e410bf18f8e65416db8760a7b5d258a2e957da405683600042e: Status 404 returned error can't find the container with id 3266e1996f714e410bf18f8e65416db8760a7b5d258a2e957da405683600042e Sep 30 12:40:54 crc kubenswrapper[5002]: I0930 12:40:54.341358 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-67b789f86c-mg4dg" event={"ID":"fcb60551-c82d-4912-8864-aa8c1b3ae7ca","Type":"ContainerStarted","Data":"3266e1996f714e410bf18f8e65416db8760a7b5d258a2e957da405683600042e"} Sep 30 12:40:55 crc kubenswrapper[5002]: I0930 12:40:55.355444 5002 generic.go:334] "Generic (PLEG): container finished" podID="fcb60551-c82d-4912-8864-aa8c1b3ae7ca" containerID="17bb285e115ba20764467469ddf160f4fde6ee8558f891384f3fc28d138cce44" exitCode=0 Sep 30 12:40:55 crc kubenswrapper[5002]: I0930 12:40:55.355525 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-67b789f86c-mg4dg" event={"ID":"fcb60551-c82d-4912-8864-aa8c1b3ae7ca","Type":"ContainerDied","Data":"17bb285e115ba20764467469ddf160f4fde6ee8558f891384f3fc28d138cce44"} Sep 30 12:40:56 crc kubenswrapper[5002]: I0930 12:40:56.369126 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-67b789f86c-mg4dg" event={"ID":"fcb60551-c82d-4912-8864-aa8c1b3ae7ca","Type":"ContainerStarted","Data":"0c478ecd6889ef2ac07476f0ae5f8a578fb4ef02021415210d5ba2556492fb25"} Sep 30 12:40:56 crc kubenswrapper[5002]: I0930 12:40:56.369443 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-67b789f86c-mg4dg" Sep 30 12:40:56 crc kubenswrapper[5002]: I0930 12:40:56.397939 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-67b789f86c-mg4dg" podStartSLOduration=3.397915776 podStartE2EDuration="3.397915776s" podCreationTimestamp="2025-09-30 12:40:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 12:40:56.392994239 +0000 UTC m=+1230.642676415" watchObservedRunningTime="2025-09-30 12:40:56.397915776 +0000 UTC m=+1230.647597932" Sep 30 12:41:02 crc kubenswrapper[5002]: I0930 12:41:02.098126 5002 patch_prober.go:28] interesting pod/machine-config-daemon-ncbb5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 12:41:02 crc kubenswrapper[5002]: I0930 12:41:02.098709 5002 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" podUID="341a55c6-78d3-4fa2-8f47-b56fd41fa1c1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 12:41:03 crc kubenswrapper[5002]: I0930 12:41:03.639805 5002 kubelet.go:2542] 
"SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-67b789f86c-mg4dg" Sep 30 12:41:03 crc kubenswrapper[5002]: I0930 12:41:03.736600 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-59cf4bdb65-sh88w"] Sep 30 12:41:03 crc kubenswrapper[5002]: I0930 12:41:03.739842 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-59cf4bdb65-sh88w" podUID="c2244511-4d5b-4162-a0f3-e5c8e89781f1" containerName="dnsmasq-dns" containerID="cri-o://7222b212b9650df669f53aa165030071b2723fbbd90068c50ac75f46ccbcc22c" gracePeriod=10 Sep 30 12:41:03 crc kubenswrapper[5002]: I0930 12:41:03.934736 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-cb6ffcf87-kzgd6"] Sep 30 12:41:03 crc kubenswrapper[5002]: I0930 12:41:03.969832 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-cb6ffcf87-kzgd6" Sep 30 12:41:04 crc kubenswrapper[5002]: I0930 12:41:03.998717 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-cb6ffcf87-kzgd6"] Sep 30 12:41:04 crc kubenswrapper[5002]: I0930 12:41:04.077901 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8ef8ec7f-169f-494e-b17d-4206c144d4f3-ovsdbserver-sb\") pod \"dnsmasq-dns-cb6ffcf87-kzgd6\" (UID: \"8ef8ec7f-169f-494e-b17d-4206c144d4f3\") " pod="openstack/dnsmasq-dns-cb6ffcf87-kzgd6" Sep 30 12:41:04 crc kubenswrapper[5002]: I0930 12:41:04.077977 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rksft\" (UniqueName: \"kubernetes.io/projected/8ef8ec7f-169f-494e-b17d-4206c144d4f3-kube-api-access-rksft\") pod \"dnsmasq-dns-cb6ffcf87-kzgd6\" (UID: \"8ef8ec7f-169f-494e-b17d-4206c144d4f3\") " pod="openstack/dnsmasq-dns-cb6ffcf87-kzgd6" Sep 30 12:41:04 crc kubenswrapper[5002]: I0930 12:41:04.078029 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/8ef8ec7f-169f-494e-b17d-4206c144d4f3-dns-swift-storage-0\") pod \"dnsmasq-dns-cb6ffcf87-kzgd6\" (UID: \"8ef8ec7f-169f-494e-b17d-4206c144d4f3\") " pod="openstack/dnsmasq-dns-cb6ffcf87-kzgd6" Sep 30 12:41:04 crc kubenswrapper[5002]: I0930 12:41:04.078128 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8ef8ec7f-169f-494e-b17d-4206c144d4f3-ovsdbserver-nb\") pod \"dnsmasq-dns-cb6ffcf87-kzgd6\" (UID: \"8ef8ec7f-169f-494e-b17d-4206c144d4f3\") " pod="openstack/dnsmasq-dns-cb6ffcf87-kzgd6" Sep 30 12:41:04 crc kubenswrapper[5002]: I0930 12:41:04.078158 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8ef8ec7f-169f-494e-b17d-4206c144d4f3-dns-svc\") pod \"dnsmasq-dns-cb6ffcf87-kzgd6\" (UID: \"8ef8ec7f-169f-494e-b17d-4206c144d4f3\") " pod="openstack/dnsmasq-dns-cb6ffcf87-kzgd6" Sep 30 12:41:04 crc kubenswrapper[5002]: I0930 12:41:04.078226 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8ef8ec7f-169f-494e-b17d-4206c144d4f3-config\") pod \"dnsmasq-dns-cb6ffcf87-kzgd6\" (UID: \"8ef8ec7f-169f-494e-b17d-4206c144d4f3\") " 
pod="openstack/dnsmasq-dns-cb6ffcf87-kzgd6" Sep 30 12:41:04 crc kubenswrapper[5002]: I0930 12:41:04.078297 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/8ef8ec7f-169f-494e-b17d-4206c144d4f3-openstack-edpm-ipam\") pod \"dnsmasq-dns-cb6ffcf87-kzgd6\" (UID: \"8ef8ec7f-169f-494e-b17d-4206c144d4f3\") " pod="openstack/dnsmasq-dns-cb6ffcf87-kzgd6" Sep 30 12:41:04 crc kubenswrapper[5002]: I0930 12:41:04.180047 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8ef8ec7f-169f-494e-b17d-4206c144d4f3-dns-svc\") pod \"dnsmasq-dns-cb6ffcf87-kzgd6\" (UID: \"8ef8ec7f-169f-494e-b17d-4206c144d4f3\") " pod="openstack/dnsmasq-dns-cb6ffcf87-kzgd6" Sep 30 12:41:04 crc kubenswrapper[5002]: I0930 12:41:04.180120 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8ef8ec7f-169f-494e-b17d-4206c144d4f3-config\") pod \"dnsmasq-dns-cb6ffcf87-kzgd6\" (UID: \"8ef8ec7f-169f-494e-b17d-4206c144d4f3\") " pod="openstack/dnsmasq-dns-cb6ffcf87-kzgd6" Sep 30 12:41:04 crc kubenswrapper[5002]: I0930 12:41:04.180174 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/8ef8ec7f-169f-494e-b17d-4206c144d4f3-openstack-edpm-ipam\") pod \"dnsmasq-dns-cb6ffcf87-kzgd6\" (UID: \"8ef8ec7f-169f-494e-b17d-4206c144d4f3\") " pod="openstack/dnsmasq-dns-cb6ffcf87-kzgd6" Sep 30 12:41:04 crc kubenswrapper[5002]: I0930 12:41:04.180283 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8ef8ec7f-169f-494e-b17d-4206c144d4f3-ovsdbserver-sb\") pod \"dnsmasq-dns-cb6ffcf87-kzgd6\" (UID: \"8ef8ec7f-169f-494e-b17d-4206c144d4f3\") " pod="openstack/dnsmasq-dns-cb6ffcf87-kzgd6" Sep 30 12:41:04 crc kubenswrapper[5002]: I0930 12:41:04.180431 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rksft\" (UniqueName: \"kubernetes.io/projected/8ef8ec7f-169f-494e-b17d-4206c144d4f3-kube-api-access-rksft\") pod \"dnsmasq-dns-cb6ffcf87-kzgd6\" (UID: \"8ef8ec7f-169f-494e-b17d-4206c144d4f3\") " pod="openstack/dnsmasq-dns-cb6ffcf87-kzgd6" Sep 30 12:41:04 crc kubenswrapper[5002]: I0930 12:41:04.181340 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/8ef8ec7f-169f-494e-b17d-4206c144d4f3-dns-swift-storage-0\") pod \"dnsmasq-dns-cb6ffcf87-kzgd6\" (UID: \"8ef8ec7f-169f-494e-b17d-4206c144d4f3\") " pod="openstack/dnsmasq-dns-cb6ffcf87-kzgd6" Sep 30 12:41:04 crc kubenswrapper[5002]: I0930 12:41:04.181438 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8ef8ec7f-169f-494e-b17d-4206c144d4f3-ovsdbserver-nb\") pod \"dnsmasq-dns-cb6ffcf87-kzgd6\" (UID: \"8ef8ec7f-169f-494e-b17d-4206c144d4f3\") " pod="openstack/dnsmasq-dns-cb6ffcf87-kzgd6" Sep 30 12:41:04 crc kubenswrapper[5002]: I0930 12:41:04.181244 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/8ef8ec7f-169f-494e-b17d-4206c144d4f3-openstack-edpm-ipam\") pod \"dnsmasq-dns-cb6ffcf87-kzgd6\" (UID: \"8ef8ec7f-169f-494e-b17d-4206c144d4f3\") " 
pod="openstack/dnsmasq-dns-cb6ffcf87-kzgd6" Sep 30 12:41:04 crc kubenswrapper[5002]: I0930 12:41:04.181246 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8ef8ec7f-169f-494e-b17d-4206c144d4f3-dns-svc\") pod \"dnsmasq-dns-cb6ffcf87-kzgd6\" (UID: \"8ef8ec7f-169f-494e-b17d-4206c144d4f3\") " pod="openstack/dnsmasq-dns-cb6ffcf87-kzgd6" Sep 30 12:41:04 crc kubenswrapper[5002]: I0930 12:41:04.181111 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8ef8ec7f-169f-494e-b17d-4206c144d4f3-ovsdbserver-sb\") pod \"dnsmasq-dns-cb6ffcf87-kzgd6\" (UID: \"8ef8ec7f-169f-494e-b17d-4206c144d4f3\") " pod="openstack/dnsmasq-dns-cb6ffcf87-kzgd6" Sep 30 12:41:04 crc kubenswrapper[5002]: I0930 12:41:04.181125 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8ef8ec7f-169f-494e-b17d-4206c144d4f3-config\") pod \"dnsmasq-dns-cb6ffcf87-kzgd6\" (UID: \"8ef8ec7f-169f-494e-b17d-4206c144d4f3\") " pod="openstack/dnsmasq-dns-cb6ffcf87-kzgd6" Sep 30 12:41:04 crc kubenswrapper[5002]: I0930 12:41:04.182674 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/8ef8ec7f-169f-494e-b17d-4206c144d4f3-dns-swift-storage-0\") pod \"dnsmasq-dns-cb6ffcf87-kzgd6\" (UID: \"8ef8ec7f-169f-494e-b17d-4206c144d4f3\") " pod="openstack/dnsmasq-dns-cb6ffcf87-kzgd6" Sep 30 12:41:04 crc kubenswrapper[5002]: I0930 12:41:04.182815 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8ef8ec7f-169f-494e-b17d-4206c144d4f3-ovsdbserver-nb\") pod \"dnsmasq-dns-cb6ffcf87-kzgd6\" (UID: \"8ef8ec7f-169f-494e-b17d-4206c144d4f3\") " pod="openstack/dnsmasq-dns-cb6ffcf87-kzgd6" Sep 30 12:41:04 crc kubenswrapper[5002]: I0930 12:41:04.205643 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rksft\" (UniqueName: \"kubernetes.io/projected/8ef8ec7f-169f-494e-b17d-4206c144d4f3-kube-api-access-rksft\") pod \"dnsmasq-dns-cb6ffcf87-kzgd6\" (UID: \"8ef8ec7f-169f-494e-b17d-4206c144d4f3\") " pod="openstack/dnsmasq-dns-cb6ffcf87-kzgd6" Sep 30 12:41:04 crc kubenswrapper[5002]: I0930 12:41:04.291801 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-59cf4bdb65-sh88w" Sep 30 12:41:04 crc kubenswrapper[5002]: I0930 12:41:04.299502 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-cb6ffcf87-kzgd6" Sep 30 12:41:04 crc kubenswrapper[5002]: I0930 12:41:04.385157 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/c2244511-4d5b-4162-a0f3-e5c8e89781f1-dns-swift-storage-0\") pod \"c2244511-4d5b-4162-a0f3-e5c8e89781f1\" (UID: \"c2244511-4d5b-4162-a0f3-e5c8e89781f1\") " Sep 30 12:41:04 crc kubenswrapper[5002]: I0930 12:41:04.385786 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c2244511-4d5b-4162-a0f3-e5c8e89781f1-config\") pod \"c2244511-4d5b-4162-a0f3-e5c8e89781f1\" (UID: \"c2244511-4d5b-4162-a0f3-e5c8e89781f1\") " Sep 30 12:41:04 crc kubenswrapper[5002]: I0930 12:41:04.385877 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c2244511-4d5b-4162-a0f3-e5c8e89781f1-ovsdbserver-nb\") pod \"c2244511-4d5b-4162-a0f3-e5c8e89781f1\" (UID: \"c2244511-4d5b-4162-a0f3-e5c8e89781f1\") " Sep 30 12:41:04 crc kubenswrapper[5002]: I0930 12:41:04.385917 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c2244511-4d5b-4162-a0f3-e5c8e89781f1-ovsdbserver-sb\") pod \"c2244511-4d5b-4162-a0f3-e5c8e89781f1\" (UID: \"c2244511-4d5b-4162-a0f3-e5c8e89781f1\") " Sep 30 12:41:04 crc kubenswrapper[5002]: I0930 12:41:04.385978 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mm6k6\" (UniqueName: \"kubernetes.io/projected/c2244511-4d5b-4162-a0f3-e5c8e89781f1-kube-api-access-mm6k6\") pod \"c2244511-4d5b-4162-a0f3-e5c8e89781f1\" (UID: \"c2244511-4d5b-4162-a0f3-e5c8e89781f1\") " Sep 30 12:41:04 crc kubenswrapper[5002]: I0930 12:41:04.386021 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c2244511-4d5b-4162-a0f3-e5c8e89781f1-dns-svc\") pod \"c2244511-4d5b-4162-a0f3-e5c8e89781f1\" (UID: \"c2244511-4d5b-4162-a0f3-e5c8e89781f1\") " Sep 30 12:41:04 crc kubenswrapper[5002]: I0930 12:41:04.389729 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c2244511-4d5b-4162-a0f3-e5c8e89781f1-kube-api-access-mm6k6" (OuterVolumeSpecName: "kube-api-access-mm6k6") pod "c2244511-4d5b-4162-a0f3-e5c8e89781f1" (UID: "c2244511-4d5b-4162-a0f3-e5c8e89781f1"). InnerVolumeSpecName "kube-api-access-mm6k6". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 12:41:04 crc kubenswrapper[5002]: I0930 12:41:04.457034 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c2244511-4d5b-4162-a0f3-e5c8e89781f1-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "c2244511-4d5b-4162-a0f3-e5c8e89781f1" (UID: "c2244511-4d5b-4162-a0f3-e5c8e89781f1"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 12:41:04 crc kubenswrapper[5002]: I0930 12:41:04.457837 5002 generic.go:334] "Generic (PLEG): container finished" podID="c2244511-4d5b-4162-a0f3-e5c8e89781f1" containerID="7222b212b9650df669f53aa165030071b2723fbbd90068c50ac75f46ccbcc22c" exitCode=0 Sep 30 12:41:04 crc kubenswrapper[5002]: I0930 12:41:04.457888 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-59cf4bdb65-sh88w" event={"ID":"c2244511-4d5b-4162-a0f3-e5c8e89781f1","Type":"ContainerDied","Data":"7222b212b9650df669f53aa165030071b2723fbbd90068c50ac75f46ccbcc22c"} Sep 30 12:41:04 crc kubenswrapper[5002]: I0930 12:41:04.457923 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-59cf4bdb65-sh88w" event={"ID":"c2244511-4d5b-4162-a0f3-e5c8e89781f1","Type":"ContainerDied","Data":"589ae39deefc1cc09453554cf6c0d9b74c7aa6507b18c7fd5cf8c6e62167bb40"} Sep 30 12:41:04 crc kubenswrapper[5002]: I0930 12:41:04.457942 5002 scope.go:117] "RemoveContainer" containerID="7222b212b9650df669f53aa165030071b2723fbbd90068c50ac75f46ccbcc22c" Sep 30 12:41:04 crc kubenswrapper[5002]: I0930 12:41:04.457945 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-59cf4bdb65-sh88w" Sep 30 12:41:04 crc kubenswrapper[5002]: I0930 12:41:04.459666 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c2244511-4d5b-4162-a0f3-e5c8e89781f1-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "c2244511-4d5b-4162-a0f3-e5c8e89781f1" (UID: "c2244511-4d5b-4162-a0f3-e5c8e89781f1"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 12:41:04 crc kubenswrapper[5002]: I0930 12:41:04.468769 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c2244511-4d5b-4162-a0f3-e5c8e89781f1-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "c2244511-4d5b-4162-a0f3-e5c8e89781f1" (UID: "c2244511-4d5b-4162-a0f3-e5c8e89781f1"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 12:41:04 crc kubenswrapper[5002]: I0930 12:41:04.473535 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c2244511-4d5b-4162-a0f3-e5c8e89781f1-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "c2244511-4d5b-4162-a0f3-e5c8e89781f1" (UID: "c2244511-4d5b-4162-a0f3-e5c8e89781f1"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 12:41:04 crc kubenswrapper[5002]: I0930 12:41:04.486852 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c2244511-4d5b-4162-a0f3-e5c8e89781f1-config" (OuterVolumeSpecName: "config") pod "c2244511-4d5b-4162-a0f3-e5c8e89781f1" (UID: "c2244511-4d5b-4162-a0f3-e5c8e89781f1"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 12:41:04 crc kubenswrapper[5002]: I0930 12:41:04.488104 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mm6k6\" (UniqueName: \"kubernetes.io/projected/c2244511-4d5b-4162-a0f3-e5c8e89781f1-kube-api-access-mm6k6\") on node \"crc\" DevicePath \"\"" Sep 30 12:41:04 crc kubenswrapper[5002]: I0930 12:41:04.488134 5002 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c2244511-4d5b-4162-a0f3-e5c8e89781f1-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 30 12:41:04 crc kubenswrapper[5002]: I0930 12:41:04.488148 5002 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/c2244511-4d5b-4162-a0f3-e5c8e89781f1-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Sep 30 12:41:04 crc kubenswrapper[5002]: I0930 12:41:04.488158 5002 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c2244511-4d5b-4162-a0f3-e5c8e89781f1-config\") on node \"crc\" DevicePath \"\"" Sep 30 12:41:04 crc kubenswrapper[5002]: I0930 12:41:04.488169 5002 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c2244511-4d5b-4162-a0f3-e5c8e89781f1-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Sep 30 12:41:04 crc kubenswrapper[5002]: I0930 12:41:04.488180 5002 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c2244511-4d5b-4162-a0f3-e5c8e89781f1-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Sep 30 12:41:04 crc kubenswrapper[5002]: I0930 12:41:04.492658 5002 scope.go:117] "RemoveContainer" containerID="ae633a35d36118945715011b877b6c72db0305486d3ffcbf658df54847155ce1" Sep 30 12:41:04 crc kubenswrapper[5002]: I0930 12:41:04.514518 5002 scope.go:117] "RemoveContainer" containerID="7222b212b9650df669f53aa165030071b2723fbbd90068c50ac75f46ccbcc22c" Sep 30 12:41:04 crc kubenswrapper[5002]: E0930 12:41:04.514968 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7222b212b9650df669f53aa165030071b2723fbbd90068c50ac75f46ccbcc22c\": container with ID starting with 7222b212b9650df669f53aa165030071b2723fbbd90068c50ac75f46ccbcc22c not found: ID does not exist" containerID="7222b212b9650df669f53aa165030071b2723fbbd90068c50ac75f46ccbcc22c" Sep 30 12:41:04 crc kubenswrapper[5002]: I0930 12:41:04.515002 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7222b212b9650df669f53aa165030071b2723fbbd90068c50ac75f46ccbcc22c"} err="failed to get container status \"7222b212b9650df669f53aa165030071b2723fbbd90068c50ac75f46ccbcc22c\": rpc error: code = NotFound desc = could not find container \"7222b212b9650df669f53aa165030071b2723fbbd90068c50ac75f46ccbcc22c\": container with ID starting with 7222b212b9650df669f53aa165030071b2723fbbd90068c50ac75f46ccbcc22c not found: ID does not exist" Sep 30 12:41:04 crc kubenswrapper[5002]: I0930 12:41:04.515053 5002 scope.go:117] "RemoveContainer" containerID="ae633a35d36118945715011b877b6c72db0305486d3ffcbf658df54847155ce1" Sep 30 12:41:04 crc kubenswrapper[5002]: E0930 12:41:04.515387 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ae633a35d36118945715011b877b6c72db0305486d3ffcbf658df54847155ce1\": container with ID starting with 
ae633a35d36118945715011b877b6c72db0305486d3ffcbf658df54847155ce1 not found: ID does not exist" containerID="ae633a35d36118945715011b877b6c72db0305486d3ffcbf658df54847155ce1" Sep 30 12:41:04 crc kubenswrapper[5002]: I0930 12:41:04.515413 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ae633a35d36118945715011b877b6c72db0305486d3ffcbf658df54847155ce1"} err="failed to get container status \"ae633a35d36118945715011b877b6c72db0305486d3ffcbf658df54847155ce1\": rpc error: code = NotFound desc = could not find container \"ae633a35d36118945715011b877b6c72db0305486d3ffcbf658df54847155ce1\": container with ID starting with ae633a35d36118945715011b877b6c72db0305486d3ffcbf658df54847155ce1 not found: ID does not exist" Sep 30 12:41:04 crc kubenswrapper[5002]: I0930 12:41:04.763379 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-cb6ffcf87-kzgd6"] Sep 30 12:41:04 crc kubenswrapper[5002]: I0930 12:41:04.783318 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-59cf4bdb65-sh88w"] Sep 30 12:41:04 crc kubenswrapper[5002]: I0930 12:41:04.792063 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-59cf4bdb65-sh88w"] Sep 30 12:41:05 crc kubenswrapper[5002]: I0930 12:41:05.470183 5002 generic.go:334] "Generic (PLEG): container finished" podID="8ef8ec7f-169f-494e-b17d-4206c144d4f3" containerID="0449b661d090527b72c7bd404ff35e35ea8bf22a6642057766902ece09c2ecf7" exitCode=0 Sep 30 12:41:05 crc kubenswrapper[5002]: I0930 12:41:05.470282 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-cb6ffcf87-kzgd6" event={"ID":"8ef8ec7f-169f-494e-b17d-4206c144d4f3","Type":"ContainerDied","Data":"0449b661d090527b72c7bd404ff35e35ea8bf22a6642057766902ece09c2ecf7"} Sep 30 12:41:05 crc kubenswrapper[5002]: I0930 12:41:05.470347 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-cb6ffcf87-kzgd6" event={"ID":"8ef8ec7f-169f-494e-b17d-4206c144d4f3","Type":"ContainerStarted","Data":"8900c77f5684631cde6b80cf201c3be7a1d3366aaeb31e93aa860d7efc5da2dd"} Sep 30 12:41:06 crc kubenswrapper[5002]: I0930 12:41:06.483588 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-cb6ffcf87-kzgd6" event={"ID":"8ef8ec7f-169f-494e-b17d-4206c144d4f3","Type":"ContainerStarted","Data":"f95c02e597d0367a52388d152bc0a129c4b9b51916c2e58786680e576a56df0d"} Sep 30 12:41:06 crc kubenswrapper[5002]: I0930 12:41:06.483841 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-cb6ffcf87-kzgd6" Sep 30 12:41:06 crc kubenswrapper[5002]: I0930 12:41:06.516846 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-cb6ffcf87-kzgd6" podStartSLOduration=3.516824819 podStartE2EDuration="3.516824819s" podCreationTimestamp="2025-09-30 12:41:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 12:41:06.506982101 +0000 UTC m=+1240.756664257" watchObservedRunningTime="2025-09-30 12:41:06.516824819 +0000 UTC m=+1240.766506975" Sep 30 12:41:06 crc kubenswrapper[5002]: I0930 12:41:06.690609 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c2244511-4d5b-4162-a0f3-e5c8e89781f1" path="/var/lib/kubelet/pods/c2244511-4d5b-4162-a0f3-e5c8e89781f1/volumes" Sep 30 12:41:14 crc kubenswrapper[5002]: I0930 12:41:14.301767 5002 kubelet.go:2542] "SyncLoop (probe)" 
probe="readiness" status="ready" pod="openstack/dnsmasq-dns-cb6ffcf87-kzgd6" Sep 30 12:41:14 crc kubenswrapper[5002]: I0930 12:41:14.382513 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-67b789f86c-mg4dg"] Sep 30 12:41:14 crc kubenswrapper[5002]: I0930 12:41:14.382996 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-67b789f86c-mg4dg" podUID="fcb60551-c82d-4912-8864-aa8c1b3ae7ca" containerName="dnsmasq-dns" containerID="cri-o://0c478ecd6889ef2ac07476f0ae5f8a578fb4ef02021415210d5ba2556492fb25" gracePeriod=10 Sep 30 12:41:14 crc kubenswrapper[5002]: I0930 12:41:14.558799 5002 generic.go:334] "Generic (PLEG): container finished" podID="fcb60551-c82d-4912-8864-aa8c1b3ae7ca" containerID="0c478ecd6889ef2ac07476f0ae5f8a578fb4ef02021415210d5ba2556492fb25" exitCode=0 Sep 30 12:41:14 crc kubenswrapper[5002]: I0930 12:41:14.558884 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-67b789f86c-mg4dg" event={"ID":"fcb60551-c82d-4912-8864-aa8c1b3ae7ca","Type":"ContainerDied","Data":"0c478ecd6889ef2ac07476f0ae5f8a578fb4ef02021415210d5ba2556492fb25"} Sep 30 12:41:14 crc kubenswrapper[5002]: I0930 12:41:14.864081 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-67b789f86c-mg4dg" Sep 30 12:41:14 crc kubenswrapper[5002]: I0930 12:41:14.882359 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/fcb60551-c82d-4912-8864-aa8c1b3ae7ca-dns-svc\") pod \"fcb60551-c82d-4912-8864-aa8c1b3ae7ca\" (UID: \"fcb60551-c82d-4912-8864-aa8c1b3ae7ca\") " Sep 30 12:41:14 crc kubenswrapper[5002]: I0930 12:41:14.882698 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jgc4s\" (UniqueName: \"kubernetes.io/projected/fcb60551-c82d-4912-8864-aa8c1b3ae7ca-kube-api-access-jgc4s\") pod \"fcb60551-c82d-4912-8864-aa8c1b3ae7ca\" (UID: \"fcb60551-c82d-4912-8864-aa8c1b3ae7ca\") " Sep 30 12:41:14 crc kubenswrapper[5002]: I0930 12:41:14.882846 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/fcb60551-c82d-4912-8864-aa8c1b3ae7ca-dns-swift-storage-0\") pod \"fcb60551-c82d-4912-8864-aa8c1b3ae7ca\" (UID: \"fcb60551-c82d-4912-8864-aa8c1b3ae7ca\") " Sep 30 12:41:14 crc kubenswrapper[5002]: I0930 12:41:14.882942 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fcb60551-c82d-4912-8864-aa8c1b3ae7ca-config\") pod \"fcb60551-c82d-4912-8864-aa8c1b3ae7ca\" (UID: \"fcb60551-c82d-4912-8864-aa8c1b3ae7ca\") " Sep 30 12:41:14 crc kubenswrapper[5002]: I0930 12:41:14.883058 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/fcb60551-c82d-4912-8864-aa8c1b3ae7ca-ovsdbserver-sb\") pod \"fcb60551-c82d-4912-8864-aa8c1b3ae7ca\" (UID: \"fcb60551-c82d-4912-8864-aa8c1b3ae7ca\") " Sep 30 12:41:14 crc kubenswrapper[5002]: I0930 12:41:14.883213 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/fcb60551-c82d-4912-8864-aa8c1b3ae7ca-ovsdbserver-nb\") pod \"fcb60551-c82d-4912-8864-aa8c1b3ae7ca\" (UID: \"fcb60551-c82d-4912-8864-aa8c1b3ae7ca\") " Sep 30 12:41:14 crc kubenswrapper[5002]: I0930 12:41:14.883349 5002 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/fcb60551-c82d-4912-8864-aa8c1b3ae7ca-openstack-edpm-ipam\") pod \"fcb60551-c82d-4912-8864-aa8c1b3ae7ca\" (UID: \"fcb60551-c82d-4912-8864-aa8c1b3ae7ca\") " Sep 30 12:41:14 crc kubenswrapper[5002]: I0930 12:41:14.891614 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fcb60551-c82d-4912-8864-aa8c1b3ae7ca-kube-api-access-jgc4s" (OuterVolumeSpecName: "kube-api-access-jgc4s") pod "fcb60551-c82d-4912-8864-aa8c1b3ae7ca" (UID: "fcb60551-c82d-4912-8864-aa8c1b3ae7ca"). InnerVolumeSpecName "kube-api-access-jgc4s". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 12:41:14 crc kubenswrapper[5002]: I0930 12:41:14.943328 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fcb60551-c82d-4912-8864-aa8c1b3ae7ca-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "fcb60551-c82d-4912-8864-aa8c1b3ae7ca" (UID: "fcb60551-c82d-4912-8864-aa8c1b3ae7ca"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 12:41:14 crc kubenswrapper[5002]: I0930 12:41:14.946981 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fcb60551-c82d-4912-8864-aa8c1b3ae7ca-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "fcb60551-c82d-4912-8864-aa8c1b3ae7ca" (UID: "fcb60551-c82d-4912-8864-aa8c1b3ae7ca"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 12:41:14 crc kubenswrapper[5002]: I0930 12:41:14.948393 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fcb60551-c82d-4912-8864-aa8c1b3ae7ca-openstack-edpm-ipam" (OuterVolumeSpecName: "openstack-edpm-ipam") pod "fcb60551-c82d-4912-8864-aa8c1b3ae7ca" (UID: "fcb60551-c82d-4912-8864-aa8c1b3ae7ca"). InnerVolumeSpecName "openstack-edpm-ipam". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 12:41:14 crc kubenswrapper[5002]: I0930 12:41:14.948532 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fcb60551-c82d-4912-8864-aa8c1b3ae7ca-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "fcb60551-c82d-4912-8864-aa8c1b3ae7ca" (UID: "fcb60551-c82d-4912-8864-aa8c1b3ae7ca"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 12:41:14 crc kubenswrapper[5002]: I0930 12:41:14.960764 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fcb60551-c82d-4912-8864-aa8c1b3ae7ca-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "fcb60551-c82d-4912-8864-aa8c1b3ae7ca" (UID: "fcb60551-c82d-4912-8864-aa8c1b3ae7ca"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 12:41:14 crc kubenswrapper[5002]: I0930 12:41:14.967754 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fcb60551-c82d-4912-8864-aa8c1b3ae7ca-config" (OuterVolumeSpecName: "config") pod "fcb60551-c82d-4912-8864-aa8c1b3ae7ca" (UID: "fcb60551-c82d-4912-8864-aa8c1b3ae7ca"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 12:41:14 crc kubenswrapper[5002]: I0930 12:41:14.985301 5002 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/fcb60551-c82d-4912-8864-aa8c1b3ae7ca-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 30 12:41:14 crc kubenswrapper[5002]: I0930 12:41:14.985337 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jgc4s\" (UniqueName: \"kubernetes.io/projected/fcb60551-c82d-4912-8864-aa8c1b3ae7ca-kube-api-access-jgc4s\") on node \"crc\" DevicePath \"\"" Sep 30 12:41:14 crc kubenswrapper[5002]: I0930 12:41:14.985351 5002 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/fcb60551-c82d-4912-8864-aa8c1b3ae7ca-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Sep 30 12:41:14 crc kubenswrapper[5002]: I0930 12:41:14.985363 5002 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fcb60551-c82d-4912-8864-aa8c1b3ae7ca-config\") on node \"crc\" DevicePath \"\"" Sep 30 12:41:14 crc kubenswrapper[5002]: I0930 12:41:14.985373 5002 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/fcb60551-c82d-4912-8864-aa8c1b3ae7ca-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Sep 30 12:41:14 crc kubenswrapper[5002]: I0930 12:41:14.985381 5002 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/fcb60551-c82d-4912-8864-aa8c1b3ae7ca-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Sep 30 12:41:14 crc kubenswrapper[5002]: I0930 12:41:14.985391 5002 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/fcb60551-c82d-4912-8864-aa8c1b3ae7ca-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Sep 30 12:41:15 crc kubenswrapper[5002]: I0930 12:41:15.569771 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-67b789f86c-mg4dg" event={"ID":"fcb60551-c82d-4912-8864-aa8c1b3ae7ca","Type":"ContainerDied","Data":"3266e1996f714e410bf18f8e65416db8760a7b5d258a2e957da405683600042e"} Sep 30 12:41:15 crc kubenswrapper[5002]: I0930 12:41:15.569845 5002 scope.go:117] "RemoveContainer" containerID="0c478ecd6889ef2ac07476f0ae5f8a578fb4ef02021415210d5ba2556492fb25" Sep 30 12:41:15 crc kubenswrapper[5002]: I0930 12:41:15.569878 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-67b789f86c-mg4dg" Sep 30 12:41:15 crc kubenswrapper[5002]: I0930 12:41:15.589162 5002 scope.go:117] "RemoveContainer" containerID="17bb285e115ba20764467469ddf160f4fde6ee8558f891384f3fc28d138cce44" Sep 30 12:41:15 crc kubenswrapper[5002]: I0930 12:41:15.613806 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-67b789f86c-mg4dg"] Sep 30 12:41:15 crc kubenswrapper[5002]: I0930 12:41:15.622725 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-67b789f86c-mg4dg"] Sep 30 12:41:16 crc kubenswrapper[5002]: I0930 12:41:16.688432 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fcb60551-c82d-4912-8864-aa8c1b3ae7ca" path="/var/lib/kubelet/pods/fcb60551-c82d-4912-8864-aa8c1b3ae7ca/volumes" Sep 30 12:41:23 crc kubenswrapper[5002]: I0930 12:41:23.000653 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-vswq9"] Sep 30 12:41:23 crc kubenswrapper[5002]: E0930 12:41:23.002706 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c2244511-4d5b-4162-a0f3-e5c8e89781f1" containerName="dnsmasq-dns" Sep 30 12:41:23 crc kubenswrapper[5002]: I0930 12:41:23.002894 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="c2244511-4d5b-4162-a0f3-e5c8e89781f1" containerName="dnsmasq-dns" Sep 30 12:41:23 crc kubenswrapper[5002]: E0930 12:41:23.002968 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fcb60551-c82d-4912-8864-aa8c1b3ae7ca" containerName="init" Sep 30 12:41:23 crc kubenswrapper[5002]: I0930 12:41:23.003027 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="fcb60551-c82d-4912-8864-aa8c1b3ae7ca" containerName="init" Sep 30 12:41:23 crc kubenswrapper[5002]: E0930 12:41:23.003092 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c2244511-4d5b-4162-a0f3-e5c8e89781f1" containerName="init" Sep 30 12:41:23 crc kubenswrapper[5002]: I0930 12:41:23.003149 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="c2244511-4d5b-4162-a0f3-e5c8e89781f1" containerName="init" Sep 30 12:41:23 crc kubenswrapper[5002]: E0930 12:41:23.003235 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fcb60551-c82d-4912-8864-aa8c1b3ae7ca" containerName="dnsmasq-dns" Sep 30 12:41:23 crc kubenswrapper[5002]: I0930 12:41:23.003297 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="fcb60551-c82d-4912-8864-aa8c1b3ae7ca" containerName="dnsmasq-dns" Sep 30 12:41:23 crc kubenswrapper[5002]: I0930 12:41:23.003553 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="fcb60551-c82d-4912-8864-aa8c1b3ae7ca" containerName="dnsmasq-dns" Sep 30 12:41:23 crc kubenswrapper[5002]: I0930 12:41:23.003626 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="c2244511-4d5b-4162-a0f3-e5c8e89781f1" containerName="dnsmasq-dns" Sep 30 12:41:23 crc kubenswrapper[5002]: I0930 12:41:23.004270 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-vswq9" Sep 30 12:41:23 crc kubenswrapper[5002]: I0930 12:41:23.007250 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 30 12:41:23 crc kubenswrapper[5002]: I0930 12:41:23.007303 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Sep 30 12:41:23 crc kubenswrapper[5002]: I0930 12:41:23.007333 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-ddhrk" Sep 30 12:41:23 crc kubenswrapper[5002]: I0930 12:41:23.007357 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Sep 30 12:41:23 crc kubenswrapper[5002]: I0930 12:41:23.014872 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-vswq9"] Sep 30 12:41:23 crc kubenswrapper[5002]: I0930 12:41:23.044897 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/67886def-b13b-463d-a4f6-3a0d13fa9580-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-vswq9\" (UID: \"67886def-b13b-463d-a4f6-3a0d13fa9580\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-vswq9" Sep 30 12:41:23 crc kubenswrapper[5002]: I0930 12:41:23.045066 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/67886def-b13b-463d-a4f6-3a0d13fa9580-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-vswq9\" (UID: \"67886def-b13b-463d-a4f6-3a0d13fa9580\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-vswq9" Sep 30 12:41:23 crc kubenswrapper[5002]: I0930 12:41:23.045101 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-twvsr\" (UniqueName: \"kubernetes.io/projected/67886def-b13b-463d-a4f6-3a0d13fa9580-kube-api-access-twvsr\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-vswq9\" (UID: \"67886def-b13b-463d-a4f6-3a0d13fa9580\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-vswq9" Sep 30 12:41:23 crc kubenswrapper[5002]: I0930 12:41:23.045365 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/67886def-b13b-463d-a4f6-3a0d13fa9580-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-vswq9\" (UID: \"67886def-b13b-463d-a4f6-3a0d13fa9580\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-vswq9" Sep 30 12:41:23 crc kubenswrapper[5002]: I0930 12:41:23.147272 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/67886def-b13b-463d-a4f6-3a0d13fa9580-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-vswq9\" (UID: \"67886def-b13b-463d-a4f6-3a0d13fa9580\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-vswq9" Sep 30 12:41:23 crc kubenswrapper[5002]: I0930 12:41:23.147339 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-twvsr\" (UniqueName: \"kubernetes.io/projected/67886def-b13b-463d-a4f6-3a0d13fa9580-kube-api-access-twvsr\") pod 
\"repo-setup-edpm-deployment-openstack-edpm-ipam-vswq9\" (UID: \"67886def-b13b-463d-a4f6-3a0d13fa9580\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-vswq9" Sep 30 12:41:23 crc kubenswrapper[5002]: I0930 12:41:23.147416 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/67886def-b13b-463d-a4f6-3a0d13fa9580-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-vswq9\" (UID: \"67886def-b13b-463d-a4f6-3a0d13fa9580\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-vswq9" Sep 30 12:41:23 crc kubenswrapper[5002]: I0930 12:41:23.147504 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/67886def-b13b-463d-a4f6-3a0d13fa9580-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-vswq9\" (UID: \"67886def-b13b-463d-a4f6-3a0d13fa9580\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-vswq9" Sep 30 12:41:23 crc kubenswrapper[5002]: I0930 12:41:23.153514 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/67886def-b13b-463d-a4f6-3a0d13fa9580-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-vswq9\" (UID: \"67886def-b13b-463d-a4f6-3a0d13fa9580\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-vswq9" Sep 30 12:41:23 crc kubenswrapper[5002]: I0930 12:41:23.153763 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/67886def-b13b-463d-a4f6-3a0d13fa9580-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-vswq9\" (UID: \"67886def-b13b-463d-a4f6-3a0d13fa9580\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-vswq9" Sep 30 12:41:23 crc kubenswrapper[5002]: I0930 12:41:23.154183 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/67886def-b13b-463d-a4f6-3a0d13fa9580-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-vswq9\" (UID: \"67886def-b13b-463d-a4f6-3a0d13fa9580\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-vswq9" Sep 30 12:41:23 crc kubenswrapper[5002]: I0930 12:41:23.165377 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-twvsr\" (UniqueName: \"kubernetes.io/projected/67886def-b13b-463d-a4f6-3a0d13fa9580-kube-api-access-twvsr\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-vswq9\" (UID: \"67886def-b13b-463d-a4f6-3a0d13fa9580\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-vswq9" Sep 30 12:41:23 crc kubenswrapper[5002]: I0930 12:41:23.328680 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-vswq9" Sep 30 12:41:23 crc kubenswrapper[5002]: I0930 12:41:23.656800 5002 generic.go:334] "Generic (PLEG): container finished" podID="9f3cd025-3ba6-453b-9224-ee63cf57890c" containerID="71683fa88a8d46889041e673a9d26be32e82426ac6b2937fcc539eb48193ccf8" exitCode=0 Sep 30 12:41:23 crc kubenswrapper[5002]: I0930 12:41:23.657109 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"9f3cd025-3ba6-453b-9224-ee63cf57890c","Type":"ContainerDied","Data":"71683fa88a8d46889041e673a9d26be32e82426ac6b2937fcc539eb48193ccf8"} Sep 30 12:41:23 crc kubenswrapper[5002]: I0930 12:41:23.658528 5002 generic.go:334] "Generic (PLEG): container finished" podID="4069e130-8a6c-4bf6-9885-b8e35857e519" containerID="9a658de55532ad0b8b15e976f47aac898695a74d94c2c28c79bb7e37af3738c4" exitCode=0 Sep 30 12:41:23 crc kubenswrapper[5002]: I0930 12:41:23.658551 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"4069e130-8a6c-4bf6-9885-b8e35857e519","Type":"ContainerDied","Data":"9a658de55532ad0b8b15e976f47aac898695a74d94c2c28c79bb7e37af3738c4"} Sep 30 12:41:23 crc kubenswrapper[5002]: I0930 12:41:23.845562 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-vswq9"] Sep 30 12:41:23 crc kubenswrapper[5002]: W0930 12:41:23.853201 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod67886def_b13b_463d_a4f6_3a0d13fa9580.slice/crio-258b41ac30040120bf7170b64b39650ae2cf2663b0f86e0bc6bb587c054328b9 WatchSource:0}: Error finding container 258b41ac30040120bf7170b64b39650ae2cf2663b0f86e0bc6bb587c054328b9: Status 404 returned error can't find the container with id 258b41ac30040120bf7170b64b39650ae2cf2663b0f86e0bc6bb587c054328b9 Sep 30 12:41:23 crc kubenswrapper[5002]: I0930 12:41:23.857235 5002 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Sep 30 12:41:24 crc kubenswrapper[5002]: I0930 12:41:24.668999 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"4069e130-8a6c-4bf6-9885-b8e35857e519","Type":"ContainerStarted","Data":"92355670a4bcce3d2703a3232b0d059bf8939fa487a8c41420ca2f4fd9f3d01f"} Sep 30 12:41:24 crc kubenswrapper[5002]: I0930 12:41:24.669509 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell1-server-0" Sep 30 12:41:24 crc kubenswrapper[5002]: I0930 12:41:24.671950 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"9f3cd025-3ba6-453b-9224-ee63cf57890c","Type":"ContainerStarted","Data":"aaf5e118b498e3a79ac65756ae584f1d1fbecb0238f9330177d4d34d52ab2eb9"} Sep 30 12:41:24 crc kubenswrapper[5002]: I0930 12:41:24.672160 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-0" Sep 30 12:41:24 crc kubenswrapper[5002]: I0930 12:41:24.673098 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-vswq9" event={"ID":"67886def-b13b-463d-a4f6-3a0d13fa9580","Type":"ContainerStarted","Data":"258b41ac30040120bf7170b64b39650ae2cf2663b0f86e0bc6bb587c054328b9"} Sep 30 12:41:24 crc kubenswrapper[5002]: I0930 12:41:24.703886 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openstack/rabbitmq-cell1-server-0" podStartSLOduration=33.703863634 podStartE2EDuration="33.703863634s" podCreationTimestamp="2025-09-30 12:40:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 12:41:24.69973111 +0000 UTC m=+1258.949413266" watchObservedRunningTime="2025-09-30 12:41:24.703863634 +0000 UTC m=+1258.953545780" Sep 30 12:41:24 crc kubenswrapper[5002]: I0930 12:41:24.733146 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-server-0" podStartSLOduration=33.733124233 podStartE2EDuration="33.733124233s" podCreationTimestamp="2025-09-30 12:40:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 12:41:24.726963698 +0000 UTC m=+1258.976645884" watchObservedRunningTime="2025-09-30 12:41:24.733124233 +0000 UTC m=+1258.982806379" Sep 30 12:41:32 crc kubenswrapper[5002]: I0930 12:41:32.098672 5002 patch_prober.go:28] interesting pod/machine-config-daemon-ncbb5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 12:41:32 crc kubenswrapper[5002]: I0930 12:41:32.099210 5002 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" podUID="341a55c6-78d3-4fa2-8f47-b56fd41fa1c1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 12:41:32 crc kubenswrapper[5002]: I0930 12:41:32.477572 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 30 12:41:32 crc kubenswrapper[5002]: I0930 12:41:32.749221 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-vswq9" event={"ID":"67886def-b13b-463d-a4f6-3a0d13fa9580","Type":"ContainerStarted","Data":"abd0428b982c7649e4ea197459a6dd1dd4bda4934ba37abcb06a7c21f3d64ab5"} Sep 30 12:41:32 crc kubenswrapper[5002]: I0930 12:41:32.765296 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-vswq9" podStartSLOduration=2.147688146 podStartE2EDuration="10.765280531s" podCreationTimestamp="2025-09-30 12:41:22 +0000 UTC" firstStartedPulling="2025-09-30 12:41:23.856943906 +0000 UTC m=+1258.106626052" lastFinishedPulling="2025-09-30 12:41:32.474536291 +0000 UTC m=+1266.724218437" observedRunningTime="2025-09-30 12:41:32.764961213 +0000 UTC m=+1267.014643389" watchObservedRunningTime="2025-09-30 12:41:32.765280531 +0000 UTC m=+1267.014962677" Sep 30 12:41:41 crc kubenswrapper[5002]: I0930 12:41:41.730775 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell1-server-0" Sep 30 12:41:41 crc kubenswrapper[5002]: I0930 12:41:41.742692 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-0" Sep 30 12:41:44 crc kubenswrapper[5002]: I0930 12:41:44.860709 5002 generic.go:334] "Generic (PLEG): container finished" podID="67886def-b13b-463d-a4f6-3a0d13fa9580" containerID="abd0428b982c7649e4ea197459a6dd1dd4bda4934ba37abcb06a7c21f3d64ab5" exitCode=0 Sep 30 12:41:44 crc kubenswrapper[5002]: I0930 
12:41:44.860865 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-vswq9" event={"ID":"67886def-b13b-463d-a4f6-3a0d13fa9580","Type":"ContainerDied","Data":"abd0428b982c7649e4ea197459a6dd1dd4bda4934ba37abcb06a7c21f3d64ab5"} Sep 30 12:41:46 crc kubenswrapper[5002]: I0930 12:41:46.356421 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-vswq9" Sep 30 12:41:46 crc kubenswrapper[5002]: I0930 12:41:46.495245 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/67886def-b13b-463d-a4f6-3a0d13fa9580-repo-setup-combined-ca-bundle\") pod \"67886def-b13b-463d-a4f6-3a0d13fa9580\" (UID: \"67886def-b13b-463d-a4f6-3a0d13fa9580\") " Sep 30 12:41:46 crc kubenswrapper[5002]: I0930 12:41:46.495861 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/67886def-b13b-463d-a4f6-3a0d13fa9580-ssh-key\") pod \"67886def-b13b-463d-a4f6-3a0d13fa9580\" (UID: \"67886def-b13b-463d-a4f6-3a0d13fa9580\") " Sep 30 12:41:46 crc kubenswrapper[5002]: I0930 12:41:46.495902 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/67886def-b13b-463d-a4f6-3a0d13fa9580-inventory\") pod \"67886def-b13b-463d-a4f6-3a0d13fa9580\" (UID: \"67886def-b13b-463d-a4f6-3a0d13fa9580\") " Sep 30 12:41:46 crc kubenswrapper[5002]: I0930 12:41:46.495942 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-twvsr\" (UniqueName: \"kubernetes.io/projected/67886def-b13b-463d-a4f6-3a0d13fa9580-kube-api-access-twvsr\") pod \"67886def-b13b-463d-a4f6-3a0d13fa9580\" (UID: \"67886def-b13b-463d-a4f6-3a0d13fa9580\") " Sep 30 12:41:46 crc kubenswrapper[5002]: I0930 12:41:46.501353 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/67886def-b13b-463d-a4f6-3a0d13fa9580-kube-api-access-twvsr" (OuterVolumeSpecName: "kube-api-access-twvsr") pod "67886def-b13b-463d-a4f6-3a0d13fa9580" (UID: "67886def-b13b-463d-a4f6-3a0d13fa9580"). InnerVolumeSpecName "kube-api-access-twvsr". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 12:41:46 crc kubenswrapper[5002]: I0930 12:41:46.502960 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/67886def-b13b-463d-a4f6-3a0d13fa9580-repo-setup-combined-ca-bundle" (OuterVolumeSpecName: "repo-setup-combined-ca-bundle") pod "67886def-b13b-463d-a4f6-3a0d13fa9580" (UID: "67886def-b13b-463d-a4f6-3a0d13fa9580"). InnerVolumeSpecName "repo-setup-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:41:46 crc kubenswrapper[5002]: I0930 12:41:46.525439 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/67886def-b13b-463d-a4f6-3a0d13fa9580-inventory" (OuterVolumeSpecName: "inventory") pod "67886def-b13b-463d-a4f6-3a0d13fa9580" (UID: "67886def-b13b-463d-a4f6-3a0d13fa9580"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:41:46 crc kubenswrapper[5002]: I0930 12:41:46.535543 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/67886def-b13b-463d-a4f6-3a0d13fa9580-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "67886def-b13b-463d-a4f6-3a0d13fa9580" (UID: "67886def-b13b-463d-a4f6-3a0d13fa9580"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:41:46 crc kubenswrapper[5002]: I0930 12:41:46.598334 5002 reconciler_common.go:293] "Volume detached for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/67886def-b13b-463d-a4f6-3a0d13fa9580-repo-setup-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 12:41:46 crc kubenswrapper[5002]: I0930 12:41:46.598365 5002 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/67886def-b13b-463d-a4f6-3a0d13fa9580-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 30 12:41:46 crc kubenswrapper[5002]: I0930 12:41:46.598376 5002 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/67886def-b13b-463d-a4f6-3a0d13fa9580-inventory\") on node \"crc\" DevicePath \"\"" Sep 30 12:41:46 crc kubenswrapper[5002]: I0930 12:41:46.598411 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-twvsr\" (UniqueName: \"kubernetes.io/projected/67886def-b13b-463d-a4f6-3a0d13fa9580-kube-api-access-twvsr\") on node \"crc\" DevicePath \"\"" Sep 30 12:41:46 crc kubenswrapper[5002]: I0930 12:41:46.878547 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-vswq9" event={"ID":"67886def-b13b-463d-a4f6-3a0d13fa9580","Type":"ContainerDied","Data":"258b41ac30040120bf7170b64b39650ae2cf2663b0f86e0bc6bb587c054328b9"} Sep 30 12:41:46 crc kubenswrapper[5002]: I0930 12:41:46.878583 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-vswq9" Sep 30 12:41:46 crc kubenswrapper[5002]: I0930 12:41:46.878601 5002 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="258b41ac30040120bf7170b64b39650ae2cf2663b0f86e0bc6bb587c054328b9" Sep 30 12:41:46 crc kubenswrapper[5002]: I0930 12:41:46.959874 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/redhat-edpm-deployment-openstack-edpm-ipam-mqcxz"] Sep 30 12:41:46 crc kubenswrapper[5002]: E0930 12:41:46.960376 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="67886def-b13b-463d-a4f6-3a0d13fa9580" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Sep 30 12:41:46 crc kubenswrapper[5002]: I0930 12:41:46.960400 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="67886def-b13b-463d-a4f6-3a0d13fa9580" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Sep 30 12:41:46 crc kubenswrapper[5002]: I0930 12:41:46.960667 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="67886def-b13b-463d-a4f6-3a0d13fa9580" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Sep 30 12:41:46 crc kubenswrapper[5002]: I0930 12:41:46.961461 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-mqcxz" Sep 30 12:41:46 crc kubenswrapper[5002]: I0930 12:41:46.963766 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-ddhrk" Sep 30 12:41:46 crc kubenswrapper[5002]: I0930 12:41:46.964694 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 30 12:41:46 crc kubenswrapper[5002]: I0930 12:41:46.964746 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Sep 30 12:41:46 crc kubenswrapper[5002]: I0930 12:41:46.965153 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Sep 30 12:41:46 crc kubenswrapper[5002]: I0930 12:41:46.970943 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/redhat-edpm-deployment-openstack-edpm-ipam-mqcxz"] Sep 30 12:41:47 crc kubenswrapper[5002]: I0930 12:41:47.108142 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ad03eb09-92a2-4d00-9290-8b142d71fea6-inventory\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-mqcxz\" (UID: \"ad03eb09-92a2-4d00-9290-8b142d71fea6\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-mqcxz" Sep 30 12:41:47 crc kubenswrapper[5002]: I0930 12:41:47.108266 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xhf8t\" (UniqueName: \"kubernetes.io/projected/ad03eb09-92a2-4d00-9290-8b142d71fea6-kube-api-access-xhf8t\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-mqcxz\" (UID: \"ad03eb09-92a2-4d00-9290-8b142d71fea6\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-mqcxz" Sep 30 12:41:47 crc kubenswrapper[5002]: I0930 12:41:47.108447 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ad03eb09-92a2-4d00-9290-8b142d71fea6-ssh-key\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-mqcxz\" (UID: \"ad03eb09-92a2-4d00-9290-8b142d71fea6\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-mqcxz" Sep 30 12:41:47 crc kubenswrapper[5002]: I0930 12:41:47.210229 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ad03eb09-92a2-4d00-9290-8b142d71fea6-inventory\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-mqcxz\" (UID: \"ad03eb09-92a2-4d00-9290-8b142d71fea6\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-mqcxz" Sep 30 12:41:47 crc kubenswrapper[5002]: I0930 12:41:47.210379 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xhf8t\" (UniqueName: \"kubernetes.io/projected/ad03eb09-92a2-4d00-9290-8b142d71fea6-kube-api-access-xhf8t\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-mqcxz\" (UID: \"ad03eb09-92a2-4d00-9290-8b142d71fea6\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-mqcxz" Sep 30 12:41:47 crc kubenswrapper[5002]: I0930 12:41:47.210596 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ad03eb09-92a2-4d00-9290-8b142d71fea6-ssh-key\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-mqcxz\" (UID: \"ad03eb09-92a2-4d00-9290-8b142d71fea6\") " 
pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-mqcxz" Sep 30 12:41:47 crc kubenswrapper[5002]: I0930 12:41:47.214134 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ad03eb09-92a2-4d00-9290-8b142d71fea6-inventory\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-mqcxz\" (UID: \"ad03eb09-92a2-4d00-9290-8b142d71fea6\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-mqcxz" Sep 30 12:41:47 crc kubenswrapper[5002]: I0930 12:41:47.214660 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ad03eb09-92a2-4d00-9290-8b142d71fea6-ssh-key\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-mqcxz\" (UID: \"ad03eb09-92a2-4d00-9290-8b142d71fea6\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-mqcxz" Sep 30 12:41:47 crc kubenswrapper[5002]: I0930 12:41:47.226404 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xhf8t\" (UniqueName: \"kubernetes.io/projected/ad03eb09-92a2-4d00-9290-8b142d71fea6-kube-api-access-xhf8t\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-mqcxz\" (UID: \"ad03eb09-92a2-4d00-9290-8b142d71fea6\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-mqcxz" Sep 30 12:41:47 crc kubenswrapper[5002]: I0930 12:41:47.282625 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-mqcxz" Sep 30 12:41:47 crc kubenswrapper[5002]: I0930 12:41:47.832193 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/redhat-edpm-deployment-openstack-edpm-ipam-mqcxz"] Sep 30 12:41:47 crc kubenswrapper[5002]: W0930 12:41:47.836129 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podad03eb09_92a2_4d00_9290_8b142d71fea6.slice/crio-e6a3ad0e9a9193ab9de0f67ae80fcaa1afcd93079cde02c4760518f916a4687a WatchSource:0}: Error finding container e6a3ad0e9a9193ab9de0f67ae80fcaa1afcd93079cde02c4760518f916a4687a: Status 404 returned error can't find the container with id e6a3ad0e9a9193ab9de0f67ae80fcaa1afcd93079cde02c4760518f916a4687a Sep 30 12:41:47 crc kubenswrapper[5002]: I0930 12:41:47.889901 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-mqcxz" event={"ID":"ad03eb09-92a2-4d00-9290-8b142d71fea6","Type":"ContainerStarted","Data":"e6a3ad0e9a9193ab9de0f67ae80fcaa1afcd93079cde02c4760518f916a4687a"} Sep 30 12:41:49 crc kubenswrapper[5002]: I0930 12:41:49.906584 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-mqcxz" event={"ID":"ad03eb09-92a2-4d00-9290-8b142d71fea6","Type":"ContainerStarted","Data":"040fd4b655e249c718d8f3ea5bee2a0dd9ed7795a6aa41b4c7174cf3623d6aef"} Sep 30 12:41:49 crc kubenswrapper[5002]: I0930 12:41:49.927855 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-mqcxz" podStartSLOduration=3.071692968 podStartE2EDuration="3.92783657s" podCreationTimestamp="2025-09-30 12:41:46 +0000 UTC" firstStartedPulling="2025-09-30 12:41:47.839892543 +0000 UTC m=+1282.089574699" lastFinishedPulling="2025-09-30 12:41:48.696036115 +0000 UTC m=+1282.945718301" observedRunningTime="2025-09-30 12:41:49.919415497 +0000 UTC m=+1284.169097653" watchObservedRunningTime="2025-09-30 12:41:49.92783657 +0000 UTC m=+1284.177518706" 
Sep 30 12:41:51 crc kubenswrapper[5002]: I0930 12:41:51.927836 5002 generic.go:334] "Generic (PLEG): container finished" podID="ad03eb09-92a2-4d00-9290-8b142d71fea6" containerID="040fd4b655e249c718d8f3ea5bee2a0dd9ed7795a6aa41b4c7174cf3623d6aef" exitCode=0
Sep 30 12:41:51 crc kubenswrapper[5002]: I0930 12:41:51.927913 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-mqcxz" event={"ID":"ad03eb09-92a2-4d00-9290-8b142d71fea6","Type":"ContainerDied","Data":"040fd4b655e249c718d8f3ea5bee2a0dd9ed7795a6aa41b4c7174cf3623d6aef"}
Sep 30 12:41:53 crc kubenswrapper[5002]: I0930 12:41:53.396245 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-mqcxz"
Sep 30 12:41:53 crc kubenswrapper[5002]: I0930 12:41:53.531631 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xhf8t\" (UniqueName: \"kubernetes.io/projected/ad03eb09-92a2-4d00-9290-8b142d71fea6-kube-api-access-xhf8t\") pod \"ad03eb09-92a2-4d00-9290-8b142d71fea6\" (UID: \"ad03eb09-92a2-4d00-9290-8b142d71fea6\") "
Sep 30 12:41:53 crc kubenswrapper[5002]: I0930 12:41:53.531833 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ad03eb09-92a2-4d00-9290-8b142d71fea6-inventory\") pod \"ad03eb09-92a2-4d00-9290-8b142d71fea6\" (UID: \"ad03eb09-92a2-4d00-9290-8b142d71fea6\") "
Sep 30 12:41:53 crc kubenswrapper[5002]: I0930 12:41:53.532141 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ad03eb09-92a2-4d00-9290-8b142d71fea6-ssh-key\") pod \"ad03eb09-92a2-4d00-9290-8b142d71fea6\" (UID: \"ad03eb09-92a2-4d00-9290-8b142d71fea6\") "
Sep 30 12:41:53 crc kubenswrapper[5002]: I0930 12:41:53.539637 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ad03eb09-92a2-4d00-9290-8b142d71fea6-kube-api-access-xhf8t" (OuterVolumeSpecName: "kube-api-access-xhf8t") pod "ad03eb09-92a2-4d00-9290-8b142d71fea6" (UID: "ad03eb09-92a2-4d00-9290-8b142d71fea6"). InnerVolumeSpecName "kube-api-access-xhf8t". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 12:41:53 crc kubenswrapper[5002]: I0930 12:41:53.572003 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ad03eb09-92a2-4d00-9290-8b142d71fea6-inventory" (OuterVolumeSpecName: "inventory") pod "ad03eb09-92a2-4d00-9290-8b142d71fea6" (UID: "ad03eb09-92a2-4d00-9290-8b142d71fea6"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 12:41:53 crc kubenswrapper[5002]: I0930 12:41:53.583080 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ad03eb09-92a2-4d00-9290-8b142d71fea6-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "ad03eb09-92a2-4d00-9290-8b142d71fea6" (UID: "ad03eb09-92a2-4d00-9290-8b142d71fea6"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 12:41:53 crc kubenswrapper[5002]: I0930 12:41:53.634229 5002 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ad03eb09-92a2-4d00-9290-8b142d71fea6-ssh-key\") on node \"crc\" DevicePath \"\""
Sep 30 12:41:53 crc kubenswrapper[5002]: I0930 12:41:53.634264 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xhf8t\" (UniqueName: \"kubernetes.io/projected/ad03eb09-92a2-4d00-9290-8b142d71fea6-kube-api-access-xhf8t\") on node \"crc\" DevicePath \"\""
Sep 30 12:41:53 crc kubenswrapper[5002]: I0930 12:41:53.634284 5002 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ad03eb09-92a2-4d00-9290-8b142d71fea6-inventory\") on node \"crc\" DevicePath \"\""
Sep 30 12:41:53 crc kubenswrapper[5002]: I0930 12:41:53.964889 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-mqcxz" event={"ID":"ad03eb09-92a2-4d00-9290-8b142d71fea6","Type":"ContainerDied","Data":"e6a3ad0e9a9193ab9de0f67ae80fcaa1afcd93079cde02c4760518f916a4687a"}
Sep 30 12:41:53 crc kubenswrapper[5002]: I0930 12:41:53.964941 5002 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e6a3ad0e9a9193ab9de0f67ae80fcaa1afcd93079cde02c4760518f916a4687a"
Sep 30 12:41:53 crc kubenswrapper[5002]: I0930 12:41:53.965011 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-mqcxz"
Sep 30 12:41:54 crc kubenswrapper[5002]: I0930 12:41:54.032866 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-xzr9z"]
Sep 30 12:41:54 crc kubenswrapper[5002]: E0930 12:41:54.033392 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ad03eb09-92a2-4d00-9290-8b142d71fea6" containerName="redhat-edpm-deployment-openstack-edpm-ipam"
Sep 30 12:41:54 crc kubenswrapper[5002]: I0930 12:41:54.033416 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="ad03eb09-92a2-4d00-9290-8b142d71fea6" containerName="redhat-edpm-deployment-openstack-edpm-ipam"
Sep 30 12:41:54 crc kubenswrapper[5002]: I0930 12:41:54.033670 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="ad03eb09-92a2-4d00-9290-8b142d71fea6" containerName="redhat-edpm-deployment-openstack-edpm-ipam"
Sep 30 12:41:54 crc kubenswrapper[5002]: I0930 12:41:54.035890 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-xzr9z"
Sep 30 12:41:54 crc kubenswrapper[5002]: I0930 12:41:54.038537 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env"
Sep 30 12:41:54 crc kubenswrapper[5002]: I0930 12:41:54.038813 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret"
Sep 30 12:41:54 crc kubenswrapper[5002]: I0930 12:41:54.038880 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam"
Sep 30 12:41:54 crc kubenswrapper[5002]: I0930 12:41:54.039046 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-ddhrk"
Sep 30 12:41:54 crc kubenswrapper[5002]: I0930 12:41:54.042093 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-xzr9z"]
Sep 30 12:41:54 crc kubenswrapper[5002]: I0930 12:41:54.144554 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f77ch\" (UniqueName: \"kubernetes.io/projected/016126fa-8541-4424-b217-acf7d88e5680-kube-api-access-f77ch\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-xzr9z\" (UID: \"016126fa-8541-4424-b217-acf7d88e5680\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-xzr9z"
Sep 30 12:41:54 crc kubenswrapper[5002]: I0930 12:41:54.144612 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/016126fa-8541-4424-b217-acf7d88e5680-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-xzr9z\" (UID: \"016126fa-8541-4424-b217-acf7d88e5680\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-xzr9z"
Sep 30 12:41:54 crc kubenswrapper[5002]: I0930 12:41:54.144664 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/016126fa-8541-4424-b217-acf7d88e5680-ssh-key\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-xzr9z\" (UID: \"016126fa-8541-4424-b217-acf7d88e5680\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-xzr9z"
Sep 30 12:41:54 crc kubenswrapper[5002]: I0930 12:41:54.144978 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/016126fa-8541-4424-b217-acf7d88e5680-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-xzr9z\" (UID: \"016126fa-8541-4424-b217-acf7d88e5680\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-xzr9z"
Sep 30 12:41:54 crc kubenswrapper[5002]: I0930 12:41:54.247076 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/016126fa-8541-4424-b217-acf7d88e5680-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-xzr9z\" (UID: \"016126fa-8541-4424-b217-acf7d88e5680\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-xzr9z"
Sep 30 12:41:54 crc kubenswrapper[5002]: I0930 12:41:54.247273 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f77ch\" (UniqueName: \"kubernetes.io/projected/016126fa-8541-4424-b217-acf7d88e5680-kube-api-access-f77ch\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-xzr9z\" (UID: \"016126fa-8541-4424-b217-acf7d88e5680\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-xzr9z"
Sep 30 12:41:54 crc kubenswrapper[5002]: I0930 12:41:54.247309 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/016126fa-8541-4424-b217-acf7d88e5680-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-xzr9z\" (UID: \"016126fa-8541-4424-b217-acf7d88e5680\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-xzr9z"
Sep 30 12:41:54 crc kubenswrapper[5002]: I0930 12:41:54.247484 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/016126fa-8541-4424-b217-acf7d88e5680-ssh-key\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-xzr9z\" (UID: \"016126fa-8541-4424-b217-acf7d88e5680\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-xzr9z"
Sep 30 12:41:54 crc kubenswrapper[5002]: I0930 12:41:54.253134 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/016126fa-8541-4424-b217-acf7d88e5680-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-xzr9z\" (UID: \"016126fa-8541-4424-b217-acf7d88e5680\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-xzr9z"
Sep 30 12:41:54 crc kubenswrapper[5002]: I0930 12:41:54.253526 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/016126fa-8541-4424-b217-acf7d88e5680-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-xzr9z\" (UID: \"016126fa-8541-4424-b217-acf7d88e5680\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-xzr9z"
Sep 30 12:41:54 crc kubenswrapper[5002]: I0930 12:41:54.258051 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/016126fa-8541-4424-b217-acf7d88e5680-ssh-key\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-xzr9z\" (UID: \"016126fa-8541-4424-b217-acf7d88e5680\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-xzr9z"
Sep 30 12:41:54 crc kubenswrapper[5002]: I0930 12:41:54.273292 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f77ch\" (UniqueName: \"kubernetes.io/projected/016126fa-8541-4424-b217-acf7d88e5680-kube-api-access-f77ch\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-xzr9z\" (UID: \"016126fa-8541-4424-b217-acf7d88e5680\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-xzr9z"
Sep 30 12:41:54 crc kubenswrapper[5002]: I0930 12:41:54.359822 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-xzr9z"
Sep 30 12:41:54 crc kubenswrapper[5002]: I0930 12:41:54.883940 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-xzr9z"]
Sep 30 12:41:54 crc kubenswrapper[5002]: I0930 12:41:54.975112 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-xzr9z" event={"ID":"016126fa-8541-4424-b217-acf7d88e5680","Type":"ContainerStarted","Data":"34165d7b717e24d84e907ac2b7ae43bbe8eed8a01658b901351d9db2d95c2ed1"}
Sep 30 12:41:55 crc kubenswrapper[5002]: I0930 12:41:55.989017 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-xzr9z" event={"ID":"016126fa-8541-4424-b217-acf7d88e5680","Type":"ContainerStarted","Data":"9909b195785c7e85f31636d3277020e46c9dfd99fc0e1735cf809ee63cc01cdf"}
Sep 30 12:41:56 crc kubenswrapper[5002]: I0930 12:41:56.010421 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-xzr9z" podStartSLOduration=1.477703936 podStartE2EDuration="2.010404242s" podCreationTimestamp="2025-09-30 12:41:54 +0000 UTC" firstStartedPulling="2025-09-30 12:41:54.891315223 +0000 UTC m=+1289.140997369" lastFinishedPulling="2025-09-30 12:41:55.424015519 +0000 UTC m=+1289.673697675" observedRunningTime="2025-09-30 12:41:56.00479674 +0000 UTC m=+1290.254478886" watchObservedRunningTime="2025-09-30 12:41:56.010404242 +0000 UTC m=+1290.260086388"
Sep 30 12:42:02 crc kubenswrapper[5002]: I0930 12:42:02.098870 5002 patch_prober.go:28] interesting pod/machine-config-daemon-ncbb5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Sep 30 12:42:02 crc kubenswrapper[5002]: I0930 12:42:02.099616 5002 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" podUID="341a55c6-78d3-4fa2-8f47-b56fd41fa1c1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Sep 30 12:42:02 crc kubenswrapper[5002]: I0930 12:42:02.099691 5002 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5"
Sep 30 12:42:02 crc kubenswrapper[5002]: I0930 12:42:02.100388 5002 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"5f3022bd0e514b5d4d606e295722375190cfc36100f55199d7e8623cc30f07d7"} pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Sep 30 12:42:02 crc kubenswrapper[5002]: I0930 12:42:02.100502 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" podUID="341a55c6-78d3-4fa2-8f47-b56fd41fa1c1" containerName="machine-config-daemon" containerID="cri-o://5f3022bd0e514b5d4d606e295722375190cfc36100f55199d7e8623cc30f07d7" gracePeriod=600
Sep 30 12:42:03 crc kubenswrapper[5002]: I0930 12:42:03.071139 5002 generic.go:334] "Generic (PLEG): container finished" podID="341a55c6-78d3-4fa2-8f47-b56fd41fa1c1" containerID="5f3022bd0e514b5d4d606e295722375190cfc36100f55199d7e8623cc30f07d7" exitCode=0
Sep 30 12:42:03 crc kubenswrapper[5002]: I0930 12:42:03.071678 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" event={"ID":"341a55c6-78d3-4fa2-8f47-b56fd41fa1c1","Type":"ContainerDied","Data":"5f3022bd0e514b5d4d606e295722375190cfc36100f55199d7e8623cc30f07d7"}
Sep 30 12:42:03 crc kubenswrapper[5002]: I0930 12:42:03.071860 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" event={"ID":"341a55c6-78d3-4fa2-8f47-b56fd41fa1c1","Type":"ContainerStarted","Data":"a913a5b698811d1cde360ed1593c0fd1d395638886860c838d99d6b923c5d677"}
Sep 30 12:42:03 crc kubenswrapper[5002]: I0930 12:42:03.071886 5002 scope.go:117] "RemoveContainer" containerID="9886eed05d26a67a26421b788732fc6013ba20fcc19e4bc6732960e19d9a03e8"
Sep 30 12:42:40 crc kubenswrapper[5002]: I0930 12:42:40.944851 5002 scope.go:117] "RemoveContainer" containerID="52a3d506ba9667dd4ac3b0002ed67025ec53a1082262c1141d589c0c001cf343"
Sep 30 12:42:54 crc kubenswrapper[5002]: I0930 12:42:54.653432 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-cluster-samples-operator_cluster-samples-operator-665b6dd947-n5jzk_b91e38cc-c852-407d-8efd-227f0bfaa5fb/cluster-samples-operator/0.log"
Sep 30 12:42:54 crc kubenswrapper[5002]: I0930 12:42:54.653926 5002 generic.go:334] "Generic (PLEG): container finished" podID="b91e38cc-c852-407d-8efd-227f0bfaa5fb" containerID="ab3d31b1199757bffa455cd8fd46c35048ab96383e132e6d78fa4e192461b8e0" exitCode=2
Sep 30 12:42:54 crc kubenswrapper[5002]: I0930 12:42:54.653963 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-n5jzk" event={"ID":"b91e38cc-c852-407d-8efd-227f0bfaa5fb","Type":"ContainerDied","Data":"ab3d31b1199757bffa455cd8fd46c35048ab96383e132e6d78fa4e192461b8e0"}
Sep 30 12:42:54 crc kubenswrapper[5002]: I0930 12:42:54.655041 5002 scope.go:117] "RemoveContainer" containerID="ab3d31b1199757bffa455cd8fd46c35048ab96383e132e6d78fa4e192461b8e0"
Sep 30 12:42:55 crc kubenswrapper[5002]: I0930 12:42:55.665377 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-cluster-samples-operator_cluster-samples-operator-665b6dd947-n5jzk_b91e38cc-c852-407d-8efd-227f0bfaa5fb/cluster-samples-operator/0.log"
Sep 30 12:42:55 crc kubenswrapper[5002]: I0930 12:42:55.665748 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-n5jzk" event={"ID":"b91e38cc-c852-407d-8efd-227f0bfaa5fb","Type":"ContainerStarted","Data":"21552058f00bc006df46e241569b570cdce372856a1e726dedff689d4546a3db"}
Sep 30 12:43:35 crc kubenswrapper[5002]: I0930 12:43:35.755954 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-tx9kr"]
Sep 30 12:43:35 crc kubenswrapper[5002]: I0930 12:43:35.759029 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-tx9kr"
Sep 30 12:43:35 crc kubenswrapper[5002]: I0930 12:43:35.769426 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-tx9kr"]
Sep 30 12:43:35 crc kubenswrapper[5002]: I0930 12:43:35.840179 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5qclq\" (UniqueName: \"kubernetes.io/projected/910f4a27-8001-4b83-8423-f1e4355307f2-kube-api-access-5qclq\") pod \"redhat-operators-tx9kr\" (UID: \"910f4a27-8001-4b83-8423-f1e4355307f2\") " pod="openshift-marketplace/redhat-operators-tx9kr"
Sep 30 12:43:35 crc kubenswrapper[5002]: I0930 12:43:35.840244 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/910f4a27-8001-4b83-8423-f1e4355307f2-catalog-content\") pod \"redhat-operators-tx9kr\" (UID: \"910f4a27-8001-4b83-8423-f1e4355307f2\") " pod="openshift-marketplace/redhat-operators-tx9kr"
Sep 30 12:43:35 crc kubenswrapper[5002]: I0930 12:43:35.840597 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/910f4a27-8001-4b83-8423-f1e4355307f2-utilities\") pod \"redhat-operators-tx9kr\" (UID: \"910f4a27-8001-4b83-8423-f1e4355307f2\") " pod="openshift-marketplace/redhat-operators-tx9kr"
Sep 30 12:43:35 crc kubenswrapper[5002]: I0930 12:43:35.942808 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5qclq\" (UniqueName: \"kubernetes.io/projected/910f4a27-8001-4b83-8423-f1e4355307f2-kube-api-access-5qclq\") pod \"redhat-operators-tx9kr\" (UID: \"910f4a27-8001-4b83-8423-f1e4355307f2\") " pod="openshift-marketplace/redhat-operators-tx9kr"
Sep 30 12:43:35 crc kubenswrapper[5002]: I0930 12:43:35.943058 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/910f4a27-8001-4b83-8423-f1e4355307f2-catalog-content\") pod \"redhat-operators-tx9kr\" (UID: \"910f4a27-8001-4b83-8423-f1e4355307f2\") " pod="openshift-marketplace/redhat-operators-tx9kr"
Sep 30 12:43:35 crc kubenswrapper[5002]: I0930 12:43:35.943171 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/910f4a27-8001-4b83-8423-f1e4355307f2-utilities\") pod \"redhat-operators-tx9kr\" (UID: \"910f4a27-8001-4b83-8423-f1e4355307f2\") " pod="openshift-marketplace/redhat-operators-tx9kr"
Sep 30 12:43:35 crc kubenswrapper[5002]: I0930 12:43:35.943750 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/910f4a27-8001-4b83-8423-f1e4355307f2-utilities\") pod \"redhat-operators-tx9kr\" (UID: \"910f4a27-8001-4b83-8423-f1e4355307f2\") " pod="openshift-marketplace/redhat-operators-tx9kr"
Sep 30 12:43:35 crc kubenswrapper[5002]: I0930 12:43:35.944110 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/910f4a27-8001-4b83-8423-f1e4355307f2-catalog-content\") pod \"redhat-operators-tx9kr\" (UID: \"910f4a27-8001-4b83-8423-f1e4355307f2\") " pod="openshift-marketplace/redhat-operators-tx9kr"
Sep 30 12:43:35 crc kubenswrapper[5002]: I0930 12:43:35.967451 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5qclq\" (UniqueName: \"kubernetes.io/projected/910f4a27-8001-4b83-8423-f1e4355307f2-kube-api-access-5qclq\") pod \"redhat-operators-tx9kr\" (UID: \"910f4a27-8001-4b83-8423-f1e4355307f2\") " pod="openshift-marketplace/redhat-operators-tx9kr"
Sep 30 12:43:36 crc kubenswrapper[5002]: I0930 12:43:36.081334 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-tx9kr"
Sep 30 12:43:36 crc kubenswrapper[5002]: I0930 12:43:36.552570 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-tx9kr"]
Sep 30 12:43:37 crc kubenswrapper[5002]: I0930 12:43:37.070045 5002 generic.go:334] "Generic (PLEG): container finished" podID="910f4a27-8001-4b83-8423-f1e4355307f2" containerID="1f9bd3773346c766b718739c776ea6ce0998bbd4619c4bdb5ad23cd8ca2f485e" exitCode=0
Sep 30 12:43:37 crc kubenswrapper[5002]: I0930 12:43:37.070102 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tx9kr" event={"ID":"910f4a27-8001-4b83-8423-f1e4355307f2","Type":"ContainerDied","Data":"1f9bd3773346c766b718739c776ea6ce0998bbd4619c4bdb5ad23cd8ca2f485e"}
Sep 30 12:43:37 crc kubenswrapper[5002]: I0930 12:43:37.071296 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tx9kr" event={"ID":"910f4a27-8001-4b83-8423-f1e4355307f2","Type":"ContainerStarted","Data":"aeabff9a2069b460613cc430bd9bb1ba2adf01bec1d41bd9f17ed2759320029c"}
Sep 30 12:43:39 crc kubenswrapper[5002]: I0930 12:43:39.092133 5002 generic.go:334] "Generic (PLEG): container finished" podID="910f4a27-8001-4b83-8423-f1e4355307f2" containerID="35869b452a7cf3a1854b40582e205c55e7f7d21d765c33f198409008e2111283" exitCode=0
Sep 30 12:43:39 crc kubenswrapper[5002]: I0930 12:43:39.092635 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tx9kr" event={"ID":"910f4a27-8001-4b83-8423-f1e4355307f2","Type":"ContainerDied","Data":"35869b452a7cf3a1854b40582e205c55e7f7d21d765c33f198409008e2111283"}
Sep 30 12:43:40 crc kubenswrapper[5002]: I0930 12:43:40.106041 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tx9kr" event={"ID":"910f4a27-8001-4b83-8423-f1e4355307f2","Type":"ContainerStarted","Data":"97dbbcc60e21cdf0fdbadc930bb5dad7910ca9267b5e1d621566121d15838102"}
Sep 30 12:43:40 crc kubenswrapper[5002]: I0930 12:43:40.126862 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-tx9kr" podStartSLOduration=2.5384928110000002 podStartE2EDuration="5.12684086s" podCreationTimestamp="2025-09-30 12:43:35 +0000 UTC" firstStartedPulling="2025-09-30 12:43:37.071971791 +0000 UTC m=+1391.321653937" lastFinishedPulling="2025-09-30 12:43:39.66031982 +0000 UTC m=+1393.910001986" observedRunningTime="2025-09-30 12:43:40.1238116 +0000 UTC m=+1394.373493796" watchObservedRunningTime="2025-09-30 12:43:40.12684086 +0000 UTC m=+1394.376523026"
Sep 30 12:43:41 crc kubenswrapper[5002]: I0930 12:43:41.011573 5002 scope.go:117] "RemoveContainer" containerID="0e29b308ed897f927e40965d64a3b8d17defa7c2ebd98f7009a18984fcf63925"
Sep 30 12:43:46 crc kubenswrapper[5002]: I0930 12:43:46.082120 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-tx9kr"
Sep 30 12:43:46 crc kubenswrapper[5002]: I0930 12:43:46.082868 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-tx9kr"
Sep 30 12:43:46 crc kubenswrapper[5002]: I0930 12:43:46.156036 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-tx9kr"
Sep 30 12:43:46 crc kubenswrapper[5002]: I0930 12:43:46.230228 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-tx9kr"
Sep 30 12:43:46 crc kubenswrapper[5002]: I0930 12:43:46.403228 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-tx9kr"]
Sep 30 12:43:48 crc kubenswrapper[5002]: I0930 12:43:48.184499 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-tx9kr" podUID="910f4a27-8001-4b83-8423-f1e4355307f2" containerName="registry-server" containerID="cri-o://97dbbcc60e21cdf0fdbadc930bb5dad7910ca9267b5e1d621566121d15838102" gracePeriod=2
Sep 30 12:43:48 crc kubenswrapper[5002]: I0930 12:43:48.640705 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-tx9kr"
Sep 30 12:43:48 crc kubenswrapper[5002]: I0930 12:43:48.691893 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5qclq\" (UniqueName: \"kubernetes.io/projected/910f4a27-8001-4b83-8423-f1e4355307f2-kube-api-access-5qclq\") pod \"910f4a27-8001-4b83-8423-f1e4355307f2\" (UID: \"910f4a27-8001-4b83-8423-f1e4355307f2\") "
Sep 30 12:43:48 crc kubenswrapper[5002]: I0930 12:43:48.693138 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/910f4a27-8001-4b83-8423-f1e4355307f2-utilities\") pod \"910f4a27-8001-4b83-8423-f1e4355307f2\" (UID: \"910f4a27-8001-4b83-8423-f1e4355307f2\") "
Sep 30 12:43:48 crc kubenswrapper[5002]: I0930 12:43:48.693171 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/910f4a27-8001-4b83-8423-f1e4355307f2-catalog-content\") pod \"910f4a27-8001-4b83-8423-f1e4355307f2\" (UID: \"910f4a27-8001-4b83-8423-f1e4355307f2\") "
Sep 30 12:43:48 crc kubenswrapper[5002]: I0930 12:43:48.693838 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/910f4a27-8001-4b83-8423-f1e4355307f2-utilities" (OuterVolumeSpecName: "utilities") pod "910f4a27-8001-4b83-8423-f1e4355307f2" (UID: "910f4a27-8001-4b83-8423-f1e4355307f2"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 30 12:43:48 crc kubenswrapper[5002]: I0930 12:43:48.698413 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/910f4a27-8001-4b83-8423-f1e4355307f2-kube-api-access-5qclq" (OuterVolumeSpecName: "kube-api-access-5qclq") pod "910f4a27-8001-4b83-8423-f1e4355307f2" (UID: "910f4a27-8001-4b83-8423-f1e4355307f2"). InnerVolumeSpecName "kube-api-access-5qclq". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 12:43:48 crc kubenswrapper[5002]: I0930 12:43:48.793238 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/910f4a27-8001-4b83-8423-f1e4355307f2-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "910f4a27-8001-4b83-8423-f1e4355307f2" (UID: "910f4a27-8001-4b83-8423-f1e4355307f2"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 30 12:43:48 crc kubenswrapper[5002]: I0930 12:43:48.795280 5002 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/910f4a27-8001-4b83-8423-f1e4355307f2-utilities\") on node \"crc\" DevicePath \"\""
Sep 30 12:43:48 crc kubenswrapper[5002]: I0930 12:43:48.795326 5002 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/910f4a27-8001-4b83-8423-f1e4355307f2-catalog-content\") on node \"crc\" DevicePath \"\""
Sep 30 12:43:48 crc kubenswrapper[5002]: I0930 12:43:48.795347 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5qclq\" (UniqueName: \"kubernetes.io/projected/910f4a27-8001-4b83-8423-f1e4355307f2-kube-api-access-5qclq\") on node \"crc\" DevicePath \"\""
Sep 30 12:43:49 crc kubenswrapper[5002]: I0930 12:43:49.196934 5002 generic.go:334] "Generic (PLEG): container finished" podID="910f4a27-8001-4b83-8423-f1e4355307f2" containerID="97dbbcc60e21cdf0fdbadc930bb5dad7910ca9267b5e1d621566121d15838102" exitCode=0
Sep 30 12:43:49 crc kubenswrapper[5002]: I0930 12:43:49.196987 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tx9kr" event={"ID":"910f4a27-8001-4b83-8423-f1e4355307f2","Type":"ContainerDied","Data":"97dbbcc60e21cdf0fdbadc930bb5dad7910ca9267b5e1d621566121d15838102"}
Sep 30 12:43:49 crc kubenswrapper[5002]: I0930 12:43:49.197012 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tx9kr" event={"ID":"910f4a27-8001-4b83-8423-f1e4355307f2","Type":"ContainerDied","Data":"aeabff9a2069b460613cc430bd9bb1ba2adf01bec1d41bd9f17ed2759320029c"}
Sep 30 12:43:49 crc kubenswrapper[5002]: I0930 12:43:49.197029 5002 scope.go:117] "RemoveContainer" containerID="97dbbcc60e21cdf0fdbadc930bb5dad7910ca9267b5e1d621566121d15838102"
Sep 30 12:43:49 crc kubenswrapper[5002]: I0930 12:43:49.197036 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-tx9kr"
Sep 30 12:43:49 crc kubenswrapper[5002]: I0930 12:43:49.251122 5002 scope.go:117] "RemoveContainer" containerID="35869b452a7cf3a1854b40582e205c55e7f7d21d765c33f198409008e2111283"
Sep 30 12:43:49 crc kubenswrapper[5002]: I0930 12:43:49.252881 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-tx9kr"]
Sep 30 12:43:49 crc kubenswrapper[5002]: I0930 12:43:49.268674 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-tx9kr"]
Sep 30 12:43:49 crc kubenswrapper[5002]: I0930 12:43:49.286828 5002 scope.go:117] "RemoveContainer" containerID="1f9bd3773346c766b718739c776ea6ce0998bbd4619c4bdb5ad23cd8ca2f485e"
Sep 30 12:43:49 crc kubenswrapper[5002]: I0930 12:43:49.331542 5002 scope.go:117] "RemoveContainer" containerID="97dbbcc60e21cdf0fdbadc930bb5dad7910ca9267b5e1d621566121d15838102"
Sep 30 12:43:49 crc kubenswrapper[5002]: E0930 12:43:49.332101 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"97dbbcc60e21cdf0fdbadc930bb5dad7910ca9267b5e1d621566121d15838102\": container with ID starting with 97dbbcc60e21cdf0fdbadc930bb5dad7910ca9267b5e1d621566121d15838102 not found: ID does not exist" containerID="97dbbcc60e21cdf0fdbadc930bb5dad7910ca9267b5e1d621566121d15838102"
Sep 30 12:43:49 crc kubenswrapper[5002]: I0930 12:43:49.332149 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"97dbbcc60e21cdf0fdbadc930bb5dad7910ca9267b5e1d621566121d15838102"} err="failed to get container status \"97dbbcc60e21cdf0fdbadc930bb5dad7910ca9267b5e1d621566121d15838102\": rpc error: code = NotFound desc = could not find container \"97dbbcc60e21cdf0fdbadc930bb5dad7910ca9267b5e1d621566121d15838102\": container with ID starting with 97dbbcc60e21cdf0fdbadc930bb5dad7910ca9267b5e1d621566121d15838102 not found: ID does not exist"
Sep 30 12:43:49 crc kubenswrapper[5002]: I0930 12:43:49.332185 5002 scope.go:117] "RemoveContainer" containerID="35869b452a7cf3a1854b40582e205c55e7f7d21d765c33f198409008e2111283"
Sep 30 12:43:49 crc kubenswrapper[5002]: E0930 12:43:49.332673 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"35869b452a7cf3a1854b40582e205c55e7f7d21d765c33f198409008e2111283\": container with ID starting with 35869b452a7cf3a1854b40582e205c55e7f7d21d765c33f198409008e2111283 not found: ID does not exist" containerID="35869b452a7cf3a1854b40582e205c55e7f7d21d765c33f198409008e2111283"
Sep 30 12:43:49 crc kubenswrapper[5002]: I0930 12:43:49.332741 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"35869b452a7cf3a1854b40582e205c55e7f7d21d765c33f198409008e2111283"} err="failed to get container status \"35869b452a7cf3a1854b40582e205c55e7f7d21d765c33f198409008e2111283\": rpc error: code = NotFound desc = could not find container \"35869b452a7cf3a1854b40582e205c55e7f7d21d765c33f198409008e2111283\": container with ID starting with 35869b452a7cf3a1854b40582e205c55e7f7d21d765c33f198409008e2111283 not found: ID does not exist"
Sep 30 12:43:49 crc kubenswrapper[5002]: I0930 12:43:49.332770 5002 scope.go:117] "RemoveContainer" containerID="1f9bd3773346c766b718739c776ea6ce0998bbd4619c4bdb5ad23cd8ca2f485e"
Sep 30 12:43:49 crc kubenswrapper[5002]: E0930 12:43:49.333203 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1f9bd3773346c766b718739c776ea6ce0998bbd4619c4bdb5ad23cd8ca2f485e\": container with ID starting with 1f9bd3773346c766b718739c776ea6ce0998bbd4619c4bdb5ad23cd8ca2f485e not found: ID does not exist" containerID="1f9bd3773346c766b718739c776ea6ce0998bbd4619c4bdb5ad23cd8ca2f485e"
Sep 30 12:43:49 crc kubenswrapper[5002]: I0930 12:43:49.333252 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1f9bd3773346c766b718739c776ea6ce0998bbd4619c4bdb5ad23cd8ca2f485e"} err="failed to get container status \"1f9bd3773346c766b718739c776ea6ce0998bbd4619c4bdb5ad23cd8ca2f485e\": rpc error: code = NotFound desc = could not find container \"1f9bd3773346c766b718739c776ea6ce0998bbd4619c4bdb5ad23cd8ca2f485e\": container with ID starting with 1f9bd3773346c766b718739c776ea6ce0998bbd4619c4bdb5ad23cd8ca2f485e not found: ID does not exist"
Sep 30 12:43:50 crc kubenswrapper[5002]: I0930 12:43:50.689029 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="910f4a27-8001-4b83-8423-f1e4355307f2" path="/var/lib/kubelet/pods/910f4a27-8001-4b83-8423-f1e4355307f2/volumes"
Sep 30 12:44:02 crc kubenswrapper[5002]: I0930 12:44:02.098084 5002 patch_prober.go:28] interesting pod/machine-config-daemon-ncbb5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Sep 30 12:44:02 crc kubenswrapper[5002]: I0930 12:44:02.098666 5002 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" podUID="341a55c6-78d3-4fa2-8f47-b56fd41fa1c1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Sep 30 12:44:09 crc kubenswrapper[5002]: I0930 12:44:09.522797 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-nt44b"]
Sep 30 12:44:09 crc kubenswrapper[5002]: E0930 12:44:09.525166 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="910f4a27-8001-4b83-8423-f1e4355307f2" containerName="extract-utilities"
Sep 30 12:44:09 crc kubenswrapper[5002]: I0930 12:44:09.525293 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="910f4a27-8001-4b83-8423-f1e4355307f2" containerName="extract-utilities"
Sep 30 12:44:09 crc kubenswrapper[5002]: E0930 12:44:09.525396 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="910f4a27-8001-4b83-8423-f1e4355307f2" containerName="extract-content"
Sep 30 12:44:09 crc kubenswrapper[5002]: I0930 12:44:09.525504 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="910f4a27-8001-4b83-8423-f1e4355307f2" containerName="extract-content"
Sep 30 12:44:09 crc kubenswrapper[5002]: E0930 12:44:09.525596 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="910f4a27-8001-4b83-8423-f1e4355307f2" containerName="registry-server"
Sep 30 12:44:09 crc kubenswrapper[5002]: I0930 12:44:09.525664 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="910f4a27-8001-4b83-8423-f1e4355307f2" containerName="registry-server"
Sep 30 12:44:09 crc kubenswrapper[5002]: I0930 12:44:09.525887 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="910f4a27-8001-4b83-8423-f1e4355307f2" containerName="registry-server"
Sep 30 12:44:09 crc kubenswrapper[5002]: I0930 12:44:09.534485 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-nt44b"
Sep 30 12:44:09 crc kubenswrapper[5002]: I0930 12:44:09.571104 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-nt44b"]
Sep 30 12:44:09 crc kubenswrapper[5002]: I0930 12:44:09.676203 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bd4d1ae3-f4bb-43e3-835a-881e5b8d22c6-utilities\") pod \"redhat-marketplace-nt44b\" (UID: \"bd4d1ae3-f4bb-43e3-835a-881e5b8d22c6\") " pod="openshift-marketplace/redhat-marketplace-nt44b"
Sep 30 12:44:09 crc kubenswrapper[5002]: I0930 12:44:09.676515 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bd4d1ae3-f4bb-43e3-835a-881e5b8d22c6-catalog-content\") pod \"redhat-marketplace-nt44b\" (UID: \"bd4d1ae3-f4bb-43e3-835a-881e5b8d22c6\") " pod="openshift-marketplace/redhat-marketplace-nt44b"
Sep 30 12:44:09 crc kubenswrapper[5002]: I0930 12:44:09.676863 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w7268\" (UniqueName: \"kubernetes.io/projected/bd4d1ae3-f4bb-43e3-835a-881e5b8d22c6-kube-api-access-w7268\") pod \"redhat-marketplace-nt44b\" (UID: \"bd4d1ae3-f4bb-43e3-835a-881e5b8d22c6\") " pod="openshift-marketplace/redhat-marketplace-nt44b"
Sep 30 12:44:09 crc kubenswrapper[5002]: I0930 12:44:09.778133 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bd4d1ae3-f4bb-43e3-835a-881e5b8d22c6-utilities\") pod \"redhat-marketplace-nt44b\" (UID: \"bd4d1ae3-f4bb-43e3-835a-881e5b8d22c6\") " pod="openshift-marketplace/redhat-marketplace-nt44b"
Sep 30 12:44:09 crc kubenswrapper[5002]: I0930 12:44:09.778217 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bd4d1ae3-f4bb-43e3-835a-881e5b8d22c6-catalog-content\") pod \"redhat-marketplace-nt44b\" (UID: \"bd4d1ae3-f4bb-43e3-835a-881e5b8d22c6\") " pod="openshift-marketplace/redhat-marketplace-nt44b"
Sep 30 12:44:09 crc kubenswrapper[5002]: I0930 12:44:09.778297 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w7268\" (UniqueName: \"kubernetes.io/projected/bd4d1ae3-f4bb-43e3-835a-881e5b8d22c6-kube-api-access-w7268\") pod \"redhat-marketplace-nt44b\" (UID: \"bd4d1ae3-f4bb-43e3-835a-881e5b8d22c6\") " pod="openshift-marketplace/redhat-marketplace-nt44b"
Sep 30 12:44:09 crc kubenswrapper[5002]: I0930 12:44:09.778888 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bd4d1ae3-f4bb-43e3-835a-881e5b8d22c6-utilities\") pod \"redhat-marketplace-nt44b\" (UID: \"bd4d1ae3-f4bb-43e3-835a-881e5b8d22c6\") " pod="openshift-marketplace/redhat-marketplace-nt44b"
Sep 30 12:44:09 crc kubenswrapper[5002]: I0930 12:44:09.778888 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bd4d1ae3-f4bb-43e3-835a-881e5b8d22c6-catalog-content\") pod \"redhat-marketplace-nt44b\" (UID: \"bd4d1ae3-f4bb-43e3-835a-881e5b8d22c6\") " pod="openshift-marketplace/redhat-marketplace-nt44b"
Sep 30 12:44:09 crc kubenswrapper[5002]: I0930 12:44:09.800801 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w7268\" (UniqueName: \"kubernetes.io/projected/bd4d1ae3-f4bb-43e3-835a-881e5b8d22c6-kube-api-access-w7268\") pod \"redhat-marketplace-nt44b\" (UID: \"bd4d1ae3-f4bb-43e3-835a-881e5b8d22c6\") " pod="openshift-marketplace/redhat-marketplace-nt44b"
Sep 30 12:44:09 crc kubenswrapper[5002]: I0930 12:44:09.862694 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-nt44b"
Sep 30 12:44:10 crc kubenswrapper[5002]: I0930 12:44:10.336540 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-nt44b"]
Sep 30 12:44:10 crc kubenswrapper[5002]: I0930 12:44:10.395066 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-nt44b" event={"ID":"bd4d1ae3-f4bb-43e3-835a-881e5b8d22c6","Type":"ContainerStarted","Data":"18f88905563cd19243f7c2ee42ee51a636964a955d8bffe6d6bf55e600611a78"}
Sep 30 12:44:11 crc kubenswrapper[5002]: I0930 12:44:11.405046 5002 generic.go:334] "Generic (PLEG): container finished" podID="bd4d1ae3-f4bb-43e3-835a-881e5b8d22c6" containerID="b3c4abe11edbec7e98d366f431bda2ab49901b13de758b0be5324352a84d5613" exitCode=0
Sep 30 12:44:11 crc kubenswrapper[5002]: I0930 12:44:11.405085 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-nt44b" event={"ID":"bd4d1ae3-f4bb-43e3-835a-881e5b8d22c6","Type":"ContainerDied","Data":"b3c4abe11edbec7e98d366f431bda2ab49901b13de758b0be5324352a84d5613"}
Sep 30 12:44:12 crc kubenswrapper[5002]: I0930 12:44:12.418511 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-nt44b" event={"ID":"bd4d1ae3-f4bb-43e3-835a-881e5b8d22c6","Type":"ContainerStarted","Data":"bbcd3fbd0d4be48fb85b1240d59895d61e8e3d3f6450b1103b4735fdd400be13"}
Sep 30 12:44:13 crc kubenswrapper[5002]: I0930 12:44:13.438390 5002 generic.go:334] "Generic (PLEG): container finished" podID="bd4d1ae3-f4bb-43e3-835a-881e5b8d22c6" containerID="bbcd3fbd0d4be48fb85b1240d59895d61e8e3d3f6450b1103b4735fdd400be13" exitCode=0
Sep 30 12:44:13 crc kubenswrapper[5002]: I0930 12:44:13.438873 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-nt44b" event={"ID":"bd4d1ae3-f4bb-43e3-835a-881e5b8d22c6","Type":"ContainerDied","Data":"bbcd3fbd0d4be48fb85b1240d59895d61e8e3d3f6450b1103b4735fdd400be13"}
Sep 30 12:44:14 crc kubenswrapper[5002]: I0930 12:44:14.450544 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-nt44b" event={"ID":"bd4d1ae3-f4bb-43e3-835a-881e5b8d22c6","Type":"ContainerStarted","Data":"e90a6c97be8126603befaa779957a54f69aea8df26df49b6ebc2c0ed84bd5e12"}
Sep 30 12:44:14 crc kubenswrapper[5002]: I0930 12:44:14.478040 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-nt44b" podStartSLOduration=2.951879904 podStartE2EDuration="5.478018167s" podCreationTimestamp="2025-09-30 12:44:09 +0000 UTC" firstStartedPulling="2025-09-30 12:44:11.409495333 +0000 UTC m=+1425.659177479" lastFinishedPulling="2025-09-30 12:44:13.935633596 +0000 UTC m=+1428.185315742" observedRunningTime="2025-09-30 12:44:14.467007455 +0000 UTC m=+1428.716689631" watchObservedRunningTime="2025-09-30 12:44:14.478018167 +0000 UTC m=+1428.727700323"
Sep 30 12:44:19 crc kubenswrapper[5002]: I0930 12:44:19.863108 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-nt44b"
Sep 30 12:44:19 crc kubenswrapper[5002]: I0930 12:44:19.864516 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-nt44b"
Sep 30 12:44:19 crc kubenswrapper[5002]: I0930 12:44:19.909888 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-nt44b"
Sep 30 12:44:20 crc kubenswrapper[5002]: I0930 12:44:20.554458 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-nt44b"
Sep 30 12:44:20 crc kubenswrapper[5002]: I0930 12:44:20.614106 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-nt44b"]
Sep 30 12:44:22 crc kubenswrapper[5002]: I0930 12:44:22.529091 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-nt44b" podUID="bd4d1ae3-f4bb-43e3-835a-881e5b8d22c6" containerName="registry-server" containerID="cri-o://e90a6c97be8126603befaa779957a54f69aea8df26df49b6ebc2c0ed84bd5e12" gracePeriod=2
Sep 30 12:44:22 crc kubenswrapper[5002]: I0930 12:44:22.993092 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-nt44b"
Sep 30 12:44:23 crc kubenswrapper[5002]: I0930 12:44:23.035953 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w7268\" (UniqueName: \"kubernetes.io/projected/bd4d1ae3-f4bb-43e3-835a-881e5b8d22c6-kube-api-access-w7268\") pod \"bd4d1ae3-f4bb-43e3-835a-881e5b8d22c6\" (UID: \"bd4d1ae3-f4bb-43e3-835a-881e5b8d22c6\") "
Sep 30 12:44:23 crc kubenswrapper[5002]: I0930 12:44:23.036286 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bd4d1ae3-f4bb-43e3-835a-881e5b8d22c6-utilities\") pod \"bd4d1ae3-f4bb-43e3-835a-881e5b8d22c6\" (UID: \"bd4d1ae3-f4bb-43e3-835a-881e5b8d22c6\") "
Sep 30 12:44:23 crc kubenswrapper[5002]: I0930 12:44:23.036443 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bd4d1ae3-f4bb-43e3-835a-881e5b8d22c6-catalog-content\") pod \"bd4d1ae3-f4bb-43e3-835a-881e5b8d22c6\" (UID: \"bd4d1ae3-f4bb-43e3-835a-881e5b8d22c6\") "
Sep 30 12:44:23 crc kubenswrapper[5002]: I0930 12:44:23.037255 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bd4d1ae3-f4bb-43e3-835a-881e5b8d22c6-utilities" (OuterVolumeSpecName: "utilities") pod "bd4d1ae3-f4bb-43e3-835a-881e5b8d22c6" (UID: "bd4d1ae3-f4bb-43e3-835a-881e5b8d22c6"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 30 12:44:23 crc kubenswrapper[5002]: I0930 12:44:23.042835 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bd4d1ae3-f4bb-43e3-835a-881e5b8d22c6-kube-api-access-w7268" (OuterVolumeSpecName: "kube-api-access-w7268") pod "bd4d1ae3-f4bb-43e3-835a-881e5b8d22c6" (UID: "bd4d1ae3-f4bb-43e3-835a-881e5b8d22c6"). InnerVolumeSpecName "kube-api-access-w7268". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 12:44:23 crc kubenswrapper[5002]: I0930 12:44:23.049148 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bd4d1ae3-f4bb-43e3-835a-881e5b8d22c6-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "bd4d1ae3-f4bb-43e3-835a-881e5b8d22c6" (UID: "bd4d1ae3-f4bb-43e3-835a-881e5b8d22c6"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 30 12:44:23 crc kubenswrapper[5002]: I0930 12:44:23.137336 5002 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bd4d1ae3-f4bb-43e3-835a-881e5b8d22c6-catalog-content\") on node \"crc\" DevicePath \"\""
Sep 30 12:44:23 crc kubenswrapper[5002]: I0930 12:44:23.137363 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w7268\" (UniqueName: \"kubernetes.io/projected/bd4d1ae3-f4bb-43e3-835a-881e5b8d22c6-kube-api-access-w7268\") on node \"crc\" DevicePath \"\""
Sep 30 12:44:23 crc kubenswrapper[5002]: I0930 12:44:23.137375 5002 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bd4d1ae3-f4bb-43e3-835a-881e5b8d22c6-utilities\") on node \"crc\" DevicePath \"\""
Sep 30 12:44:23 crc kubenswrapper[5002]: I0930 12:44:23.544137 5002 generic.go:334] "Generic (PLEG): container finished" podID="bd4d1ae3-f4bb-43e3-835a-881e5b8d22c6" containerID="e90a6c97be8126603befaa779957a54f69aea8df26df49b6ebc2c0ed84bd5e12" exitCode=0
Sep 30 12:44:23 crc kubenswrapper[5002]: I0930 12:44:23.544227 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-nt44b" event={"ID":"bd4d1ae3-f4bb-43e3-835a-881e5b8d22c6","Type":"ContainerDied","Data":"e90a6c97be8126603befaa779957a54f69aea8df26df49b6ebc2c0ed84bd5e12"}
Sep 30 12:44:23 crc kubenswrapper[5002]: I0930 12:44:23.544306 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-nt44b" event={"ID":"bd4d1ae3-f4bb-43e3-835a-881e5b8d22c6","Type":"ContainerDied","Data":"18f88905563cd19243f7c2ee42ee51a636964a955d8bffe6d6bf55e600611a78"}
Sep 30 12:44:23 crc kubenswrapper[5002]: I0930 12:44:23.544334 5002 scope.go:117] "RemoveContainer" containerID="e90a6c97be8126603befaa779957a54f69aea8df26df49b6ebc2c0ed84bd5e12"
Sep 30 12:44:23 crc kubenswrapper[5002]: I0930 12:44:23.545686 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-nt44b"
Sep 30 12:44:23 crc kubenswrapper[5002]: I0930 12:44:23.584855 5002 scope.go:117] "RemoveContainer" containerID="bbcd3fbd0d4be48fb85b1240d59895d61e8e3d3f6450b1103b4735fdd400be13"
Sep 30 12:44:23 crc kubenswrapper[5002]: I0930 12:44:23.597821 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-nt44b"]
Sep 30 12:44:23 crc kubenswrapper[5002]: I0930 12:44:23.612504 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-nt44b"]
Sep 30 12:44:23 crc kubenswrapper[5002]: I0930 12:44:23.621970 5002 scope.go:117] "RemoveContainer" containerID="b3c4abe11edbec7e98d366f431bda2ab49901b13de758b0be5324352a84d5613"
Sep 30 12:44:23 crc kubenswrapper[5002]: I0930 12:44:23.656373 5002 scope.go:117] "RemoveContainer" containerID="e90a6c97be8126603befaa779957a54f69aea8df26df49b6ebc2c0ed84bd5e12"
Sep 30 12:44:23 crc kubenswrapper[5002]: E0930 12:44:23.656992 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e90a6c97be8126603befaa779957a54f69aea8df26df49b6ebc2c0ed84bd5e12\": container with ID starting with e90a6c97be8126603befaa779957a54f69aea8df26df49b6ebc2c0ed84bd5e12 not found: ID does not exist" containerID="e90a6c97be8126603befaa779957a54f69aea8df26df49b6ebc2c0ed84bd5e12"
Sep 30 12:44:23 crc kubenswrapper[5002]: I0930 12:44:23.657041 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e90a6c97be8126603befaa779957a54f69aea8df26df49b6ebc2c0ed84bd5e12"} err="failed to get container status \"e90a6c97be8126603befaa779957a54f69aea8df26df49b6ebc2c0ed84bd5e12\": rpc error: code = NotFound desc = could not find container \"e90a6c97be8126603befaa779957a54f69aea8df26df49b6ebc2c0ed84bd5e12\": container with ID starting with e90a6c97be8126603befaa779957a54f69aea8df26df49b6ebc2c0ed84bd5e12 not found: ID does not exist"
Sep 30 12:44:23 crc kubenswrapper[5002]: I0930 12:44:23.657073 5002 scope.go:117] "RemoveContainer" containerID="bbcd3fbd0d4be48fb85b1240d59895d61e8e3d3f6450b1103b4735fdd400be13"
Sep 30 12:44:23 crc kubenswrapper[5002]: E0930 12:44:23.657422 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bbcd3fbd0d4be48fb85b1240d59895d61e8e3d3f6450b1103b4735fdd400be13\": container with ID starting with bbcd3fbd0d4be48fb85b1240d59895d61e8e3d3f6450b1103b4735fdd400be13 not found: ID does not exist" containerID="bbcd3fbd0d4be48fb85b1240d59895d61e8e3d3f6450b1103b4735fdd400be13"
Sep 30 12:44:23 crc kubenswrapper[5002]: I0930 12:44:23.657461 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bbcd3fbd0d4be48fb85b1240d59895d61e8e3d3f6450b1103b4735fdd400be13"} err="failed to get container status \"bbcd3fbd0d4be48fb85b1240d59895d61e8e3d3f6450b1103b4735fdd400be13\": rpc error: code = NotFound desc = could not find container \"bbcd3fbd0d4be48fb85b1240d59895d61e8e3d3f6450b1103b4735fdd400be13\": container with ID starting with bbcd3fbd0d4be48fb85b1240d59895d61e8e3d3f6450b1103b4735fdd400be13 not found: ID does not exist"
Sep 30 12:44:23 crc kubenswrapper[5002]: I0930 12:44:23.657563 5002 scope.go:117] "RemoveContainer" containerID="b3c4abe11edbec7e98d366f431bda2ab49901b13de758b0be5324352a84d5613"
Sep 30 12:44:23 crc kubenswrapper[5002]: E0930 12:44:23.658034 5002 log.go:32] "ContainerStatus from runtime service
failed" err="rpc error: code = NotFound desc = could not find container \"b3c4abe11edbec7e98d366f431bda2ab49901b13de758b0be5324352a84d5613\": container with ID starting with b3c4abe11edbec7e98d366f431bda2ab49901b13de758b0be5324352a84d5613 not found: ID does not exist" containerID="b3c4abe11edbec7e98d366f431bda2ab49901b13de758b0be5324352a84d5613" Sep 30 12:44:23 crc kubenswrapper[5002]: I0930 12:44:23.658064 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b3c4abe11edbec7e98d366f431bda2ab49901b13de758b0be5324352a84d5613"} err="failed to get container status \"b3c4abe11edbec7e98d366f431bda2ab49901b13de758b0be5324352a84d5613\": rpc error: code = NotFound desc = could not find container \"b3c4abe11edbec7e98d366f431bda2ab49901b13de758b0be5324352a84d5613\": container with ID starting with b3c4abe11edbec7e98d366f431bda2ab49901b13de758b0be5324352a84d5613 not found: ID does not exist" Sep 30 12:44:24 crc kubenswrapper[5002]: I0930 12:44:24.693095 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bd4d1ae3-f4bb-43e3-835a-881e5b8d22c6" path="/var/lib/kubelet/pods/bd4d1ae3-f4bb-43e3-835a-881e5b8d22c6/volumes" Sep 30 12:44:25 crc kubenswrapper[5002]: I0930 12:44:25.565833 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-xlmcv"] Sep 30 12:44:25 crc kubenswrapper[5002]: E0930 12:44:25.567144 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bd4d1ae3-f4bb-43e3-835a-881e5b8d22c6" containerName="registry-server" Sep 30 12:44:25 crc kubenswrapper[5002]: I0930 12:44:25.567175 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="bd4d1ae3-f4bb-43e3-835a-881e5b8d22c6" containerName="registry-server" Sep 30 12:44:25 crc kubenswrapper[5002]: E0930 12:44:25.567206 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bd4d1ae3-f4bb-43e3-835a-881e5b8d22c6" containerName="extract-content" Sep 30 12:44:25 crc kubenswrapper[5002]: I0930 12:44:25.567215 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="bd4d1ae3-f4bb-43e3-835a-881e5b8d22c6" containerName="extract-content" Sep 30 12:44:25 crc kubenswrapper[5002]: E0930 12:44:25.567238 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bd4d1ae3-f4bb-43e3-835a-881e5b8d22c6" containerName="extract-utilities" Sep 30 12:44:25 crc kubenswrapper[5002]: I0930 12:44:25.567249 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="bd4d1ae3-f4bb-43e3-835a-881e5b8d22c6" containerName="extract-utilities" Sep 30 12:44:25 crc kubenswrapper[5002]: I0930 12:44:25.567496 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="bd4d1ae3-f4bb-43e3-835a-881e5b8d22c6" containerName="registry-server" Sep 30 12:44:25 crc kubenswrapper[5002]: I0930 12:44:25.577188 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-xlmcv" Sep 30 12:44:25 crc kubenswrapper[5002]: I0930 12:44:25.583158 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-96gnn\" (UniqueName: \"kubernetes.io/projected/61db55ba-978d-4ac8-8303-d52005542b2c-kube-api-access-96gnn\") pod \"community-operators-xlmcv\" (UID: \"61db55ba-978d-4ac8-8303-d52005542b2c\") " pod="openshift-marketplace/community-operators-xlmcv" Sep 30 12:44:25 crc kubenswrapper[5002]: I0930 12:44:25.583279 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/61db55ba-978d-4ac8-8303-d52005542b2c-catalog-content\") pod \"community-operators-xlmcv\" (UID: \"61db55ba-978d-4ac8-8303-d52005542b2c\") " pod="openshift-marketplace/community-operators-xlmcv" Sep 30 12:44:25 crc kubenswrapper[5002]: I0930 12:44:25.583325 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/61db55ba-978d-4ac8-8303-d52005542b2c-utilities\") pod \"community-operators-xlmcv\" (UID: \"61db55ba-978d-4ac8-8303-d52005542b2c\") " pod="openshift-marketplace/community-operators-xlmcv" Sep 30 12:44:25 crc kubenswrapper[5002]: I0930 12:44:25.595953 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-xlmcv"] Sep 30 12:44:25 crc kubenswrapper[5002]: I0930 12:44:25.684510 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-96gnn\" (UniqueName: \"kubernetes.io/projected/61db55ba-978d-4ac8-8303-d52005542b2c-kube-api-access-96gnn\") pod \"community-operators-xlmcv\" (UID: \"61db55ba-978d-4ac8-8303-d52005542b2c\") " pod="openshift-marketplace/community-operators-xlmcv" Sep 30 12:44:25 crc kubenswrapper[5002]: I0930 12:44:25.684611 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/61db55ba-978d-4ac8-8303-d52005542b2c-catalog-content\") pod \"community-operators-xlmcv\" (UID: \"61db55ba-978d-4ac8-8303-d52005542b2c\") " pod="openshift-marketplace/community-operators-xlmcv" Sep 30 12:44:25 crc kubenswrapper[5002]: I0930 12:44:25.684657 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/61db55ba-978d-4ac8-8303-d52005542b2c-utilities\") pod \"community-operators-xlmcv\" (UID: \"61db55ba-978d-4ac8-8303-d52005542b2c\") " pod="openshift-marketplace/community-operators-xlmcv" Sep 30 12:44:25 crc kubenswrapper[5002]: I0930 12:44:25.685228 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/61db55ba-978d-4ac8-8303-d52005542b2c-utilities\") pod \"community-operators-xlmcv\" (UID: \"61db55ba-978d-4ac8-8303-d52005542b2c\") " pod="openshift-marketplace/community-operators-xlmcv" Sep 30 12:44:25 crc kubenswrapper[5002]: I0930 12:44:25.685284 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/61db55ba-978d-4ac8-8303-d52005542b2c-catalog-content\") pod \"community-operators-xlmcv\" (UID: \"61db55ba-978d-4ac8-8303-d52005542b2c\") " pod="openshift-marketplace/community-operators-xlmcv" Sep 30 12:44:25 crc kubenswrapper[5002]: I0930 12:44:25.703420 5002 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-96gnn\" (UniqueName: \"kubernetes.io/projected/61db55ba-978d-4ac8-8303-d52005542b2c-kube-api-access-96gnn\") pod \"community-operators-xlmcv\" (UID: \"61db55ba-978d-4ac8-8303-d52005542b2c\") " pod="openshift-marketplace/community-operators-xlmcv" Sep 30 12:44:25 crc kubenswrapper[5002]: I0930 12:44:25.902151 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-xlmcv" Sep 30 12:44:26 crc kubenswrapper[5002]: I0930 12:44:26.392897 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-xlmcv"] Sep 30 12:44:26 crc kubenswrapper[5002]: I0930 12:44:26.579915 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xlmcv" event={"ID":"61db55ba-978d-4ac8-8303-d52005542b2c","Type":"ContainerStarted","Data":"02a058f56510cc89ed59290a07a87a3ec444496b380bff72cdc008bd46785930"} Sep 30 12:44:27 crc kubenswrapper[5002]: I0930 12:44:27.589706 5002 generic.go:334] "Generic (PLEG): container finished" podID="61db55ba-978d-4ac8-8303-d52005542b2c" containerID="1c99e292d9cf689702fbf0a441b7dc9c673def8503789562c5ea4ae07803cdfe" exitCode=0 Sep 30 12:44:27 crc kubenswrapper[5002]: I0930 12:44:27.589854 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xlmcv" event={"ID":"61db55ba-978d-4ac8-8303-d52005542b2c","Type":"ContainerDied","Data":"1c99e292d9cf689702fbf0a441b7dc9c673def8503789562c5ea4ae07803cdfe"} Sep 30 12:44:29 crc kubenswrapper[5002]: I0930 12:44:29.607023 5002 generic.go:334] "Generic (PLEG): container finished" podID="61db55ba-978d-4ac8-8303-d52005542b2c" containerID="52046b1796971f00e1a2fc1b582c1d474257e47682eff1a5391232617658e7b6" exitCode=0 Sep 30 12:44:29 crc kubenswrapper[5002]: I0930 12:44:29.607190 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xlmcv" event={"ID":"61db55ba-978d-4ac8-8303-d52005542b2c","Type":"ContainerDied","Data":"52046b1796971f00e1a2fc1b582c1d474257e47682eff1a5391232617658e7b6"} Sep 30 12:44:31 crc kubenswrapper[5002]: I0930 12:44:31.630381 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xlmcv" event={"ID":"61db55ba-978d-4ac8-8303-d52005542b2c","Type":"ContainerStarted","Data":"3d238b39cda72cc6f5c7518bc216ea8c4286e45a0ee858e96ee927cdbfd91d0d"} Sep 30 12:44:32 crc kubenswrapper[5002]: I0930 12:44:32.098006 5002 patch_prober.go:28] interesting pod/machine-config-daemon-ncbb5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 12:44:32 crc kubenswrapper[5002]: I0930 12:44:32.098081 5002 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" podUID="341a55c6-78d3-4fa2-8f47-b56fd41fa1c1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 12:44:33 crc kubenswrapper[5002]: E0930 12:44:33.167311 5002 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: 
[\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod61db55ba_978d_4ac8_8303_d52005542b2c.slice/crio-conmon-1c99e292d9cf689702fbf0a441b7dc9c673def8503789562c5ea4ae07803cdfe.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod61db55ba_978d_4ac8_8303_d52005542b2c.slice/crio-1c99e292d9cf689702fbf0a441b7dc9c673def8503789562c5ea4ae07803cdfe.scope\": RecentStats: unable to find data in memory cache]" Sep 30 12:44:35 crc kubenswrapper[5002]: I0930 12:44:35.903254 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-xlmcv" Sep 30 12:44:35 crc kubenswrapper[5002]: I0930 12:44:35.903633 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-xlmcv" Sep 30 12:44:35 crc kubenswrapper[5002]: I0930 12:44:35.965796 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-xlmcv" Sep 30 12:44:35 crc kubenswrapper[5002]: I0930 12:44:35.989025 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-xlmcv" podStartSLOduration=7.831903325 podStartE2EDuration="10.988999467s" podCreationTimestamp="2025-09-30 12:44:25 +0000 UTC" firstStartedPulling="2025-09-30 12:44:27.591778639 +0000 UTC m=+1441.841460785" lastFinishedPulling="2025-09-30 12:44:30.748874771 +0000 UTC m=+1444.998556927" observedRunningTime="2025-09-30 12:44:31.649180113 +0000 UTC m=+1445.898862269" watchObservedRunningTime="2025-09-30 12:44:35.988999467 +0000 UTC m=+1450.238681653" Sep 30 12:44:36 crc kubenswrapper[5002]: I0930 12:44:36.715184 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-xlmcv" Sep 30 12:44:36 crc kubenswrapper[5002]: I0930 12:44:36.759198 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-xlmcv"] Sep 30 12:44:38 crc kubenswrapper[5002]: I0930 12:44:38.689936 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-xlmcv" podUID="61db55ba-978d-4ac8-8303-d52005542b2c" containerName="registry-server" containerID="cri-o://3d238b39cda72cc6f5c7518bc216ea8c4286e45a0ee858e96ee927cdbfd91d0d" gracePeriod=2 Sep 30 12:44:39 crc kubenswrapper[5002]: I0930 12:44:39.664153 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-xlmcv" Sep 30 12:44:39 crc kubenswrapper[5002]: I0930 12:44:39.701976 5002 generic.go:334] "Generic (PLEG): container finished" podID="61db55ba-978d-4ac8-8303-d52005542b2c" containerID="3d238b39cda72cc6f5c7518bc216ea8c4286e45a0ee858e96ee927cdbfd91d0d" exitCode=0 Sep 30 12:44:39 crc kubenswrapper[5002]: I0930 12:44:39.702048 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xlmcv" event={"ID":"61db55ba-978d-4ac8-8303-d52005542b2c","Type":"ContainerDied","Data":"3d238b39cda72cc6f5c7518bc216ea8c4286e45a0ee858e96ee927cdbfd91d0d"} Sep 30 12:44:39 crc kubenswrapper[5002]: I0930 12:44:39.702089 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xlmcv" event={"ID":"61db55ba-978d-4ac8-8303-d52005542b2c","Type":"ContainerDied","Data":"02a058f56510cc89ed59290a07a87a3ec444496b380bff72cdc008bd46785930"} Sep 30 12:44:39 crc kubenswrapper[5002]: I0930 12:44:39.702114 5002 scope.go:117] "RemoveContainer" containerID="3d238b39cda72cc6f5c7518bc216ea8c4286e45a0ee858e96ee927cdbfd91d0d" Sep 30 12:44:39 crc kubenswrapper[5002]: I0930 12:44:39.702379 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-xlmcv" Sep 30 12:44:39 crc kubenswrapper[5002]: I0930 12:44:39.723637 5002 scope.go:117] "RemoveContainer" containerID="52046b1796971f00e1a2fc1b582c1d474257e47682eff1a5391232617658e7b6" Sep 30 12:44:39 crc kubenswrapper[5002]: I0930 12:44:39.746148 5002 scope.go:117] "RemoveContainer" containerID="1c99e292d9cf689702fbf0a441b7dc9c673def8503789562c5ea4ae07803cdfe" Sep 30 12:44:39 crc kubenswrapper[5002]: I0930 12:44:39.753072 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/61db55ba-978d-4ac8-8303-d52005542b2c-utilities\") pod \"61db55ba-978d-4ac8-8303-d52005542b2c\" (UID: \"61db55ba-978d-4ac8-8303-d52005542b2c\") " Sep 30 12:44:39 crc kubenswrapper[5002]: I0930 12:44:39.753180 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/61db55ba-978d-4ac8-8303-d52005542b2c-catalog-content\") pod \"61db55ba-978d-4ac8-8303-d52005542b2c\" (UID: \"61db55ba-978d-4ac8-8303-d52005542b2c\") " Sep 30 12:44:39 crc kubenswrapper[5002]: I0930 12:44:39.753212 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-96gnn\" (UniqueName: \"kubernetes.io/projected/61db55ba-978d-4ac8-8303-d52005542b2c-kube-api-access-96gnn\") pod \"61db55ba-978d-4ac8-8303-d52005542b2c\" (UID: \"61db55ba-978d-4ac8-8303-d52005542b2c\") " Sep 30 12:44:39 crc kubenswrapper[5002]: I0930 12:44:39.754318 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/61db55ba-978d-4ac8-8303-d52005542b2c-utilities" (OuterVolumeSpecName: "utilities") pod "61db55ba-978d-4ac8-8303-d52005542b2c" (UID: "61db55ba-978d-4ac8-8303-d52005542b2c"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 12:44:39 crc kubenswrapper[5002]: I0930 12:44:39.754834 5002 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/61db55ba-978d-4ac8-8303-d52005542b2c-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 12:44:39 crc kubenswrapper[5002]: I0930 12:44:39.759442 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/61db55ba-978d-4ac8-8303-d52005542b2c-kube-api-access-96gnn" (OuterVolumeSpecName: "kube-api-access-96gnn") pod "61db55ba-978d-4ac8-8303-d52005542b2c" (UID: "61db55ba-978d-4ac8-8303-d52005542b2c"). InnerVolumeSpecName "kube-api-access-96gnn". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 12:44:39 crc kubenswrapper[5002]: I0930 12:44:39.806574 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/61db55ba-978d-4ac8-8303-d52005542b2c-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "61db55ba-978d-4ac8-8303-d52005542b2c" (UID: "61db55ba-978d-4ac8-8303-d52005542b2c"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 12:44:39 crc kubenswrapper[5002]: I0930 12:44:39.840334 5002 scope.go:117] "RemoveContainer" containerID="3d238b39cda72cc6f5c7518bc216ea8c4286e45a0ee858e96ee927cdbfd91d0d" Sep 30 12:44:39 crc kubenswrapper[5002]: E0930 12:44:39.840825 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3d238b39cda72cc6f5c7518bc216ea8c4286e45a0ee858e96ee927cdbfd91d0d\": container with ID starting with 3d238b39cda72cc6f5c7518bc216ea8c4286e45a0ee858e96ee927cdbfd91d0d not found: ID does not exist" containerID="3d238b39cda72cc6f5c7518bc216ea8c4286e45a0ee858e96ee927cdbfd91d0d" Sep 30 12:44:39 crc kubenswrapper[5002]: I0930 12:44:39.840866 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3d238b39cda72cc6f5c7518bc216ea8c4286e45a0ee858e96ee927cdbfd91d0d"} err="failed to get container status \"3d238b39cda72cc6f5c7518bc216ea8c4286e45a0ee858e96ee927cdbfd91d0d\": rpc error: code = NotFound desc = could not find container \"3d238b39cda72cc6f5c7518bc216ea8c4286e45a0ee858e96ee927cdbfd91d0d\": container with ID starting with 3d238b39cda72cc6f5c7518bc216ea8c4286e45a0ee858e96ee927cdbfd91d0d not found: ID does not exist" Sep 30 12:44:39 crc kubenswrapper[5002]: I0930 12:44:39.840893 5002 scope.go:117] "RemoveContainer" containerID="52046b1796971f00e1a2fc1b582c1d474257e47682eff1a5391232617658e7b6" Sep 30 12:44:39 crc kubenswrapper[5002]: E0930 12:44:39.841135 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"52046b1796971f00e1a2fc1b582c1d474257e47682eff1a5391232617658e7b6\": container with ID starting with 52046b1796971f00e1a2fc1b582c1d474257e47682eff1a5391232617658e7b6 not found: ID does not exist" containerID="52046b1796971f00e1a2fc1b582c1d474257e47682eff1a5391232617658e7b6" Sep 30 12:44:39 crc kubenswrapper[5002]: I0930 12:44:39.841166 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"52046b1796971f00e1a2fc1b582c1d474257e47682eff1a5391232617658e7b6"} err="failed to get container status \"52046b1796971f00e1a2fc1b582c1d474257e47682eff1a5391232617658e7b6\": rpc error: code = NotFound desc = could not find container 
\"52046b1796971f00e1a2fc1b582c1d474257e47682eff1a5391232617658e7b6\": container with ID starting with 52046b1796971f00e1a2fc1b582c1d474257e47682eff1a5391232617658e7b6 not found: ID does not exist" Sep 30 12:44:39 crc kubenswrapper[5002]: I0930 12:44:39.841185 5002 scope.go:117] "RemoveContainer" containerID="1c99e292d9cf689702fbf0a441b7dc9c673def8503789562c5ea4ae07803cdfe" Sep 30 12:44:39 crc kubenswrapper[5002]: E0930 12:44:39.841446 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1c99e292d9cf689702fbf0a441b7dc9c673def8503789562c5ea4ae07803cdfe\": container with ID starting with 1c99e292d9cf689702fbf0a441b7dc9c673def8503789562c5ea4ae07803cdfe not found: ID does not exist" containerID="1c99e292d9cf689702fbf0a441b7dc9c673def8503789562c5ea4ae07803cdfe" Sep 30 12:44:39 crc kubenswrapper[5002]: I0930 12:44:39.841492 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1c99e292d9cf689702fbf0a441b7dc9c673def8503789562c5ea4ae07803cdfe"} err="failed to get container status \"1c99e292d9cf689702fbf0a441b7dc9c673def8503789562c5ea4ae07803cdfe\": rpc error: code = NotFound desc = could not find container \"1c99e292d9cf689702fbf0a441b7dc9c673def8503789562c5ea4ae07803cdfe\": container with ID starting with 1c99e292d9cf689702fbf0a441b7dc9c673def8503789562c5ea4ae07803cdfe not found: ID does not exist" Sep 30 12:44:39 crc kubenswrapper[5002]: I0930 12:44:39.856075 5002 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/61db55ba-978d-4ac8-8303-d52005542b2c-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 12:44:39 crc kubenswrapper[5002]: I0930 12:44:39.856118 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-96gnn\" (UniqueName: \"kubernetes.io/projected/61db55ba-978d-4ac8-8303-d52005542b2c-kube-api-access-96gnn\") on node \"crc\" DevicePath \"\"" Sep 30 12:44:40 crc kubenswrapper[5002]: I0930 12:44:40.039256 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-xlmcv"] Sep 30 12:44:40 crc kubenswrapper[5002]: I0930 12:44:40.048931 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-xlmcv"] Sep 30 12:44:40 crc kubenswrapper[5002]: I0930 12:44:40.687841 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="61db55ba-978d-4ac8-8303-d52005542b2c" path="/var/lib/kubelet/pods/61db55ba-978d-4ac8-8303-d52005542b2c/volumes" Sep 30 12:44:43 crc kubenswrapper[5002]: E0930 12:44:43.423940 5002 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod61db55ba_978d_4ac8_8303_d52005542b2c.slice/crio-conmon-1c99e292d9cf689702fbf0a441b7dc9c673def8503789562c5ea4ae07803cdfe.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod61db55ba_978d_4ac8_8303_d52005542b2c.slice/crio-1c99e292d9cf689702fbf0a441b7dc9c673def8503789562c5ea4ae07803cdfe.scope\": RecentStats: unable to find data in memory cache]" Sep 30 12:44:53 crc kubenswrapper[5002]: E0930 12:44:53.681323 5002 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: 
[\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod61db55ba_978d_4ac8_8303_d52005542b2c.slice/crio-1c99e292d9cf689702fbf0a441b7dc9c673def8503789562c5ea4ae07803cdfe.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod61db55ba_978d_4ac8_8303_d52005542b2c.slice/crio-conmon-1c99e292d9cf689702fbf0a441b7dc9c673def8503789562c5ea4ae07803cdfe.scope\": RecentStats: unable to find data in memory cache]" Sep 30 12:44:56 crc kubenswrapper[5002]: I0930 12:44:56.279971 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-97qdd"] Sep 30 12:44:56 crc kubenswrapper[5002]: E0930 12:44:56.280903 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="61db55ba-978d-4ac8-8303-d52005542b2c" containerName="extract-content" Sep 30 12:44:56 crc kubenswrapper[5002]: I0930 12:44:56.280919 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="61db55ba-978d-4ac8-8303-d52005542b2c" containerName="extract-content" Sep 30 12:44:56 crc kubenswrapper[5002]: E0930 12:44:56.280934 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="61db55ba-978d-4ac8-8303-d52005542b2c" containerName="extract-utilities" Sep 30 12:44:56 crc kubenswrapper[5002]: I0930 12:44:56.280943 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="61db55ba-978d-4ac8-8303-d52005542b2c" containerName="extract-utilities" Sep 30 12:44:56 crc kubenswrapper[5002]: E0930 12:44:56.280958 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="61db55ba-978d-4ac8-8303-d52005542b2c" containerName="registry-server" Sep 30 12:44:56 crc kubenswrapper[5002]: I0930 12:44:56.280967 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="61db55ba-978d-4ac8-8303-d52005542b2c" containerName="registry-server" Sep 30 12:44:56 crc kubenswrapper[5002]: I0930 12:44:56.281199 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="61db55ba-978d-4ac8-8303-d52005542b2c" containerName="registry-server" Sep 30 12:44:56 crc kubenswrapper[5002]: I0930 12:44:56.283323 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-97qdd" Sep 30 12:44:56 crc kubenswrapper[5002]: I0930 12:44:56.315358 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-97qdd"] Sep 30 12:44:56 crc kubenswrapper[5002]: I0930 12:44:56.375447 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5b7af2e5-f533-4313-870e-be0cd4bc7762-catalog-content\") pod \"certified-operators-97qdd\" (UID: \"5b7af2e5-f533-4313-870e-be0cd4bc7762\") " pod="openshift-marketplace/certified-operators-97qdd" Sep 30 12:44:56 crc kubenswrapper[5002]: I0930 12:44:56.375515 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5b7af2e5-f533-4313-870e-be0cd4bc7762-utilities\") pod \"certified-operators-97qdd\" (UID: \"5b7af2e5-f533-4313-870e-be0cd4bc7762\") " pod="openshift-marketplace/certified-operators-97qdd" Sep 30 12:44:56 crc kubenswrapper[5002]: I0930 12:44:56.375549 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ssbrw\" (UniqueName: \"kubernetes.io/projected/5b7af2e5-f533-4313-870e-be0cd4bc7762-kube-api-access-ssbrw\") pod \"certified-operators-97qdd\" (UID: \"5b7af2e5-f533-4313-870e-be0cd4bc7762\") " pod="openshift-marketplace/certified-operators-97qdd" Sep 30 12:44:56 crc kubenswrapper[5002]: I0930 12:44:56.477179 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5b7af2e5-f533-4313-870e-be0cd4bc7762-catalog-content\") pod \"certified-operators-97qdd\" (UID: \"5b7af2e5-f533-4313-870e-be0cd4bc7762\") " pod="openshift-marketplace/certified-operators-97qdd" Sep 30 12:44:56 crc kubenswrapper[5002]: I0930 12:44:56.477229 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5b7af2e5-f533-4313-870e-be0cd4bc7762-utilities\") pod \"certified-operators-97qdd\" (UID: \"5b7af2e5-f533-4313-870e-be0cd4bc7762\") " pod="openshift-marketplace/certified-operators-97qdd" Sep 30 12:44:56 crc kubenswrapper[5002]: I0930 12:44:56.477263 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ssbrw\" (UniqueName: \"kubernetes.io/projected/5b7af2e5-f533-4313-870e-be0cd4bc7762-kube-api-access-ssbrw\") pod \"certified-operators-97qdd\" (UID: \"5b7af2e5-f533-4313-870e-be0cd4bc7762\") " pod="openshift-marketplace/certified-operators-97qdd" Sep 30 12:44:56 crc kubenswrapper[5002]: I0930 12:44:56.477773 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5b7af2e5-f533-4313-870e-be0cd4bc7762-utilities\") pod \"certified-operators-97qdd\" (UID: \"5b7af2e5-f533-4313-870e-be0cd4bc7762\") " pod="openshift-marketplace/certified-operators-97qdd" Sep 30 12:44:56 crc kubenswrapper[5002]: I0930 12:44:56.478069 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5b7af2e5-f533-4313-870e-be0cd4bc7762-catalog-content\") pod \"certified-operators-97qdd\" (UID: \"5b7af2e5-f533-4313-870e-be0cd4bc7762\") " pod="openshift-marketplace/certified-operators-97qdd" Sep 30 12:44:56 crc kubenswrapper[5002]: I0930 12:44:56.497278 5002 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-ssbrw\" (UniqueName: \"kubernetes.io/projected/5b7af2e5-f533-4313-870e-be0cd4bc7762-kube-api-access-ssbrw\") pod \"certified-operators-97qdd\" (UID: \"5b7af2e5-f533-4313-870e-be0cd4bc7762\") " pod="openshift-marketplace/certified-operators-97qdd" Sep 30 12:44:56 crc kubenswrapper[5002]: I0930 12:44:56.623688 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-97qdd" Sep 30 12:44:57 crc kubenswrapper[5002]: I0930 12:44:57.165571 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-97qdd"] Sep 30 12:44:57 crc kubenswrapper[5002]: I0930 12:44:57.886902 5002 generic.go:334] "Generic (PLEG): container finished" podID="5b7af2e5-f533-4313-870e-be0cd4bc7762" containerID="79d2a43048a8654f31871f8fadfef8c43c85bcfc8953854d57d5978dea1a1307" exitCode=0 Sep 30 12:44:57 crc kubenswrapper[5002]: I0930 12:44:57.887186 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-97qdd" event={"ID":"5b7af2e5-f533-4313-870e-be0cd4bc7762","Type":"ContainerDied","Data":"79d2a43048a8654f31871f8fadfef8c43c85bcfc8953854d57d5978dea1a1307"} Sep 30 12:44:57 crc kubenswrapper[5002]: I0930 12:44:57.887216 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-97qdd" event={"ID":"5b7af2e5-f533-4313-870e-be0cd4bc7762","Type":"ContainerStarted","Data":"a51e82c4224fd89b0cb5361f5c24c1a193b75b29a92c223236b1044d16f67309"} Sep 30 12:44:59 crc kubenswrapper[5002]: I0930 12:44:59.908136 5002 generic.go:334] "Generic (PLEG): container finished" podID="5b7af2e5-f533-4313-870e-be0cd4bc7762" containerID="81cdcc88351aecc1c9344f3a54551d2db3bb2a773e1ca3c4d4871fa63729ad55" exitCode=0 Sep 30 12:44:59 crc kubenswrapper[5002]: I0930 12:44:59.908199 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-97qdd" event={"ID":"5b7af2e5-f533-4313-870e-be0cd4bc7762","Type":"ContainerDied","Data":"81cdcc88351aecc1c9344f3a54551d2db3bb2a773e1ca3c4d4871fa63729ad55"} Sep 30 12:45:00 crc kubenswrapper[5002]: I0930 12:45:00.144822 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29320605-zltsw"] Sep 30 12:45:00 crc kubenswrapper[5002]: I0930 12:45:00.146745 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29320605-zltsw" Sep 30 12:45:00 crc kubenswrapper[5002]: I0930 12:45:00.149555 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Sep 30 12:45:00 crc kubenswrapper[5002]: I0930 12:45:00.151770 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Sep 30 12:45:00 crc kubenswrapper[5002]: I0930 12:45:00.155066 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29320605-zltsw"] Sep 30 12:45:00 crc kubenswrapper[5002]: I0930 12:45:00.251553 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/d250b251-cfb4-4df3-90cc-1e8cc5ae1ab8-config-volume\") pod \"collect-profiles-29320605-zltsw\" (UID: \"d250b251-cfb4-4df3-90cc-1e8cc5ae1ab8\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29320605-zltsw" Sep 30 12:45:00 crc kubenswrapper[5002]: I0930 12:45:00.251705 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/d250b251-cfb4-4df3-90cc-1e8cc5ae1ab8-secret-volume\") pod \"collect-profiles-29320605-zltsw\" (UID: \"d250b251-cfb4-4df3-90cc-1e8cc5ae1ab8\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29320605-zltsw" Sep 30 12:45:00 crc kubenswrapper[5002]: I0930 12:45:00.251757 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qpbxm\" (UniqueName: \"kubernetes.io/projected/d250b251-cfb4-4df3-90cc-1e8cc5ae1ab8-kube-api-access-qpbxm\") pod \"collect-profiles-29320605-zltsw\" (UID: \"d250b251-cfb4-4df3-90cc-1e8cc5ae1ab8\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29320605-zltsw" Sep 30 12:45:00 crc kubenswrapper[5002]: I0930 12:45:00.353801 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/d250b251-cfb4-4df3-90cc-1e8cc5ae1ab8-config-volume\") pod \"collect-profiles-29320605-zltsw\" (UID: \"d250b251-cfb4-4df3-90cc-1e8cc5ae1ab8\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29320605-zltsw" Sep 30 12:45:00 crc kubenswrapper[5002]: I0930 12:45:00.353973 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/d250b251-cfb4-4df3-90cc-1e8cc5ae1ab8-secret-volume\") pod \"collect-profiles-29320605-zltsw\" (UID: \"d250b251-cfb4-4df3-90cc-1e8cc5ae1ab8\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29320605-zltsw" Sep 30 12:45:00 crc kubenswrapper[5002]: I0930 12:45:00.354070 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qpbxm\" (UniqueName: \"kubernetes.io/projected/d250b251-cfb4-4df3-90cc-1e8cc5ae1ab8-kube-api-access-qpbxm\") pod \"collect-profiles-29320605-zltsw\" (UID: \"d250b251-cfb4-4df3-90cc-1e8cc5ae1ab8\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29320605-zltsw" Sep 30 12:45:00 crc kubenswrapper[5002]: I0930 12:45:00.354991 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/d250b251-cfb4-4df3-90cc-1e8cc5ae1ab8-config-volume\") pod 
\"collect-profiles-29320605-zltsw\" (UID: \"d250b251-cfb4-4df3-90cc-1e8cc5ae1ab8\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29320605-zltsw" Sep 30 12:45:00 crc kubenswrapper[5002]: I0930 12:45:00.363615 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/d250b251-cfb4-4df3-90cc-1e8cc5ae1ab8-secret-volume\") pod \"collect-profiles-29320605-zltsw\" (UID: \"d250b251-cfb4-4df3-90cc-1e8cc5ae1ab8\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29320605-zltsw" Sep 30 12:45:00 crc kubenswrapper[5002]: I0930 12:45:00.373673 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qpbxm\" (UniqueName: \"kubernetes.io/projected/d250b251-cfb4-4df3-90cc-1e8cc5ae1ab8-kube-api-access-qpbxm\") pod \"collect-profiles-29320605-zltsw\" (UID: \"d250b251-cfb4-4df3-90cc-1e8cc5ae1ab8\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29320605-zltsw" Sep 30 12:45:00 crc kubenswrapper[5002]: I0930 12:45:00.467056 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29320605-zltsw" Sep 30 12:45:00 crc kubenswrapper[5002]: I0930 12:45:00.921796 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-97qdd" event={"ID":"5b7af2e5-f533-4313-870e-be0cd4bc7762","Type":"ContainerStarted","Data":"b560a745a429a36a92f6f9a2343fdd59ac00a00bd297ba824fff3f4d8953ccd7"} Sep 30 12:45:00 crc kubenswrapper[5002]: I0930 12:45:00.922963 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29320605-zltsw"] Sep 30 12:45:00 crc kubenswrapper[5002]: W0930 12:45:00.924996 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd250b251_cfb4_4df3_90cc_1e8cc5ae1ab8.slice/crio-2e4930cba9674c57b45e14c6ed7f04b9d5a2a19379c9c43d0cd370fc52431d5e WatchSource:0}: Error finding container 2e4930cba9674c57b45e14c6ed7f04b9d5a2a19379c9c43d0cd370fc52431d5e: Status 404 returned error can't find the container with id 2e4930cba9674c57b45e14c6ed7f04b9d5a2a19379c9c43d0cd370fc52431d5e Sep 30 12:45:00 crc kubenswrapper[5002]: I0930 12:45:00.972506 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-97qdd" podStartSLOduration=2.309961885 podStartE2EDuration="4.972456414s" podCreationTimestamp="2025-09-30 12:44:56 +0000 UTC" firstStartedPulling="2025-09-30 12:44:57.889457925 +0000 UTC m=+1472.139140071" lastFinishedPulling="2025-09-30 12:45:00.551952454 +0000 UTC m=+1474.801634600" observedRunningTime="2025-09-30 12:45:00.961518153 +0000 UTC m=+1475.211200319" watchObservedRunningTime="2025-09-30 12:45:00.972456414 +0000 UTC m=+1475.222138570" Sep 30 12:45:01 crc kubenswrapper[5002]: I0930 12:45:01.933365 5002 generic.go:334] "Generic (PLEG): container finished" podID="016126fa-8541-4424-b217-acf7d88e5680" containerID="9909b195785c7e85f31636d3277020e46c9dfd99fc0e1735cf809ee63cc01cdf" exitCode=0 Sep 30 12:45:01 crc kubenswrapper[5002]: I0930 12:45:01.933424 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-xzr9z" event={"ID":"016126fa-8541-4424-b217-acf7d88e5680","Type":"ContainerDied","Data":"9909b195785c7e85f31636d3277020e46c9dfd99fc0e1735cf809ee63cc01cdf"} Sep 30 12:45:01 crc kubenswrapper[5002]: I0930 
12:45:01.935844 5002 generic.go:334] "Generic (PLEG): container finished" podID="d250b251-cfb4-4df3-90cc-1e8cc5ae1ab8" containerID="293c5e86e80662870d4bc86ecc9ec26d67bbb2f69cfe6ed045964a463b74061a" exitCode=0 Sep 30 12:45:01 crc kubenswrapper[5002]: I0930 12:45:01.935893 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29320605-zltsw" event={"ID":"d250b251-cfb4-4df3-90cc-1e8cc5ae1ab8","Type":"ContainerDied","Data":"293c5e86e80662870d4bc86ecc9ec26d67bbb2f69cfe6ed045964a463b74061a"} Sep 30 12:45:01 crc kubenswrapper[5002]: I0930 12:45:01.935930 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29320605-zltsw" event={"ID":"d250b251-cfb4-4df3-90cc-1e8cc5ae1ab8","Type":"ContainerStarted","Data":"2e4930cba9674c57b45e14c6ed7f04b9d5a2a19379c9c43d0cd370fc52431d5e"} Sep 30 12:45:02 crc kubenswrapper[5002]: I0930 12:45:02.099142 5002 patch_prober.go:28] interesting pod/machine-config-daemon-ncbb5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 12:45:02 crc kubenswrapper[5002]: I0930 12:45:02.099200 5002 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" podUID="341a55c6-78d3-4fa2-8f47-b56fd41fa1c1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 12:45:02 crc kubenswrapper[5002]: I0930 12:45:02.099248 5002 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" Sep 30 12:45:02 crc kubenswrapper[5002]: I0930 12:45:02.100486 5002 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"a913a5b698811d1cde360ed1593c0fd1d395638886860c838d99d6b923c5d677"} pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 30 12:45:02 crc kubenswrapper[5002]: I0930 12:45:02.100550 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" podUID="341a55c6-78d3-4fa2-8f47-b56fd41fa1c1" containerName="machine-config-daemon" containerID="cri-o://a913a5b698811d1cde360ed1593c0fd1d395638886860c838d99d6b923c5d677" gracePeriod=600 Sep 30 12:45:02 crc kubenswrapper[5002]: E0930 12:45:02.242782 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-ncbb5_openshift-machine-config-operator(341a55c6-78d3-4fa2-8f47-b56fd41fa1c1)\"" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" podUID="341a55c6-78d3-4fa2-8f47-b56fd41fa1c1" Sep 30 12:45:02 crc kubenswrapper[5002]: I0930 12:45:02.948717 5002 generic.go:334] "Generic (PLEG): container finished" podID="341a55c6-78d3-4fa2-8f47-b56fd41fa1c1" containerID="a913a5b698811d1cde360ed1593c0fd1d395638886860c838d99d6b923c5d677" exitCode=0 Sep 30 12:45:02 crc kubenswrapper[5002]: I0930 12:45:02.948785 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" event={"ID":"341a55c6-78d3-4fa2-8f47-b56fd41fa1c1","Type":"ContainerDied","Data":"a913a5b698811d1cde360ed1593c0fd1d395638886860c838d99d6b923c5d677"} Sep 30 12:45:02 crc kubenswrapper[5002]: I0930 12:45:02.948826 5002 scope.go:117] "RemoveContainer" containerID="5f3022bd0e514b5d4d606e295722375190cfc36100f55199d7e8623cc30f07d7" Sep 30 12:45:02 crc kubenswrapper[5002]: I0930 12:45:02.949723 5002 scope.go:117] "RemoveContainer" containerID="a913a5b698811d1cde360ed1593c0fd1d395638886860c838d99d6b923c5d677" Sep 30 12:45:02 crc kubenswrapper[5002]: E0930 12:45:02.950016 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-ncbb5_openshift-machine-config-operator(341a55c6-78d3-4fa2-8f47-b56fd41fa1c1)\"" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" podUID="341a55c6-78d3-4fa2-8f47-b56fd41fa1c1" Sep 30 12:45:03 crc kubenswrapper[5002]: I0930 12:45:03.401402 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29320605-zltsw" Sep 30 12:45:03 crc kubenswrapper[5002]: I0930 12:45:03.410983 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-xzr9z" Sep 30 12:45:03 crc kubenswrapper[5002]: I0930 12:45:03.516203 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/d250b251-cfb4-4df3-90cc-1e8cc5ae1ab8-secret-volume\") pod \"d250b251-cfb4-4df3-90cc-1e8cc5ae1ab8\" (UID: \"d250b251-cfb4-4df3-90cc-1e8cc5ae1ab8\") " Sep 30 12:45:03 crc kubenswrapper[5002]: I0930 12:45:03.516282 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/016126fa-8541-4424-b217-acf7d88e5680-inventory\") pod \"016126fa-8541-4424-b217-acf7d88e5680\" (UID: \"016126fa-8541-4424-b217-acf7d88e5680\") " Sep 30 12:45:03 crc kubenswrapper[5002]: I0930 12:45:03.516349 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/016126fa-8541-4424-b217-acf7d88e5680-ssh-key\") pod \"016126fa-8541-4424-b217-acf7d88e5680\" (UID: \"016126fa-8541-4424-b217-acf7d88e5680\") " Sep 30 12:45:03 crc kubenswrapper[5002]: I0930 12:45:03.516403 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f77ch\" (UniqueName: \"kubernetes.io/projected/016126fa-8541-4424-b217-acf7d88e5680-kube-api-access-f77ch\") pod \"016126fa-8541-4424-b217-acf7d88e5680\" (UID: \"016126fa-8541-4424-b217-acf7d88e5680\") " Sep 30 12:45:03 crc kubenswrapper[5002]: I0930 12:45:03.516532 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/d250b251-cfb4-4df3-90cc-1e8cc5ae1ab8-config-volume\") pod \"d250b251-cfb4-4df3-90cc-1e8cc5ae1ab8\" (UID: \"d250b251-cfb4-4df3-90cc-1e8cc5ae1ab8\") " Sep 30 12:45:03 crc kubenswrapper[5002]: I0930 12:45:03.516625 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qpbxm\" (UniqueName: \"kubernetes.io/projected/d250b251-cfb4-4df3-90cc-1e8cc5ae1ab8-kube-api-access-qpbxm\") pod 
\"d250b251-cfb4-4df3-90cc-1e8cc5ae1ab8\" (UID: \"d250b251-cfb4-4df3-90cc-1e8cc5ae1ab8\") " Sep 30 12:45:03 crc kubenswrapper[5002]: I0930 12:45:03.516654 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/016126fa-8541-4424-b217-acf7d88e5680-bootstrap-combined-ca-bundle\") pod \"016126fa-8541-4424-b217-acf7d88e5680\" (UID: \"016126fa-8541-4424-b217-acf7d88e5680\") " Sep 30 12:45:03 crc kubenswrapper[5002]: I0930 12:45:03.517325 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d250b251-cfb4-4df3-90cc-1e8cc5ae1ab8-config-volume" (OuterVolumeSpecName: "config-volume") pod "d250b251-cfb4-4df3-90cc-1e8cc5ae1ab8" (UID: "d250b251-cfb4-4df3-90cc-1e8cc5ae1ab8"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 12:45:03 crc kubenswrapper[5002]: I0930 12:45:03.518119 5002 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/d250b251-cfb4-4df3-90cc-1e8cc5ae1ab8-config-volume\") on node \"crc\" DevicePath \"\"" Sep 30 12:45:03 crc kubenswrapper[5002]: I0930 12:45:03.524249 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/016126fa-8541-4424-b217-acf7d88e5680-kube-api-access-f77ch" (OuterVolumeSpecName: "kube-api-access-f77ch") pod "016126fa-8541-4424-b217-acf7d88e5680" (UID: "016126fa-8541-4424-b217-acf7d88e5680"). InnerVolumeSpecName "kube-api-access-f77ch". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 12:45:03 crc kubenswrapper[5002]: I0930 12:45:03.524750 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d250b251-cfb4-4df3-90cc-1e8cc5ae1ab8-kube-api-access-qpbxm" (OuterVolumeSpecName: "kube-api-access-qpbxm") pod "d250b251-cfb4-4df3-90cc-1e8cc5ae1ab8" (UID: "d250b251-cfb4-4df3-90cc-1e8cc5ae1ab8"). InnerVolumeSpecName "kube-api-access-qpbxm". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 12:45:03 crc kubenswrapper[5002]: I0930 12:45:03.525263 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d250b251-cfb4-4df3-90cc-1e8cc5ae1ab8-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "d250b251-cfb4-4df3-90cc-1e8cc5ae1ab8" (UID: "d250b251-cfb4-4df3-90cc-1e8cc5ae1ab8"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:45:03 crc kubenswrapper[5002]: I0930 12:45:03.525679 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/016126fa-8541-4424-b217-acf7d88e5680-bootstrap-combined-ca-bundle" (OuterVolumeSpecName: "bootstrap-combined-ca-bundle") pod "016126fa-8541-4424-b217-acf7d88e5680" (UID: "016126fa-8541-4424-b217-acf7d88e5680"). InnerVolumeSpecName "bootstrap-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:45:03 crc kubenswrapper[5002]: I0930 12:45:03.548018 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/016126fa-8541-4424-b217-acf7d88e5680-inventory" (OuterVolumeSpecName: "inventory") pod "016126fa-8541-4424-b217-acf7d88e5680" (UID: "016126fa-8541-4424-b217-acf7d88e5680"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:45:03 crc kubenswrapper[5002]: I0930 12:45:03.549915 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/016126fa-8541-4424-b217-acf7d88e5680-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "016126fa-8541-4424-b217-acf7d88e5680" (UID: "016126fa-8541-4424-b217-acf7d88e5680"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:45:03 crc kubenswrapper[5002]: I0930 12:45:03.619786 5002 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/016126fa-8541-4424-b217-acf7d88e5680-inventory\") on node \"crc\" DevicePath \"\"" Sep 30 12:45:03 crc kubenswrapper[5002]: I0930 12:45:03.619833 5002 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/016126fa-8541-4424-b217-acf7d88e5680-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 30 12:45:03 crc kubenswrapper[5002]: I0930 12:45:03.619846 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f77ch\" (UniqueName: \"kubernetes.io/projected/016126fa-8541-4424-b217-acf7d88e5680-kube-api-access-f77ch\") on node \"crc\" DevicePath \"\"" Sep 30 12:45:03 crc kubenswrapper[5002]: I0930 12:45:03.619861 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qpbxm\" (UniqueName: \"kubernetes.io/projected/d250b251-cfb4-4df3-90cc-1e8cc5ae1ab8-kube-api-access-qpbxm\") on node \"crc\" DevicePath \"\"" Sep 30 12:45:03 crc kubenswrapper[5002]: I0930 12:45:03.619872 5002 reconciler_common.go:293] "Volume detached for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/016126fa-8541-4424-b217-acf7d88e5680-bootstrap-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 12:45:03 crc kubenswrapper[5002]: I0930 12:45:03.619884 5002 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/d250b251-cfb4-4df3-90cc-1e8cc5ae1ab8-secret-volume\") on node \"crc\" DevicePath \"\"" Sep 30 12:45:03 crc kubenswrapper[5002]: I0930 12:45:03.960534 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-xzr9z" event={"ID":"016126fa-8541-4424-b217-acf7d88e5680","Type":"ContainerDied","Data":"34165d7b717e24d84e907ac2b7ae43bbe8eed8a01658b901351d9db2d95c2ed1"} Sep 30 12:45:03 crc kubenswrapper[5002]: I0930 12:45:03.960895 5002 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="34165d7b717e24d84e907ac2b7ae43bbe8eed8a01658b901351d9db2d95c2ed1" Sep 30 12:45:03 crc kubenswrapper[5002]: I0930 12:45:03.961166 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-xzr9z" Sep 30 12:45:03 crc kubenswrapper[5002]: E0930 12:45:03.963659 5002 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod61db55ba_978d_4ac8_8303_d52005542b2c.slice/crio-conmon-1c99e292d9cf689702fbf0a441b7dc9c673def8503789562c5ea4ae07803cdfe.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod61db55ba_978d_4ac8_8303_d52005542b2c.slice/crio-1c99e292d9cf689702fbf0a441b7dc9c673def8503789562c5ea4ae07803cdfe.scope\": RecentStats: unable to find data in memory cache]" Sep 30 12:45:03 crc kubenswrapper[5002]: I0930 12:45:03.964806 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29320605-zltsw" event={"ID":"d250b251-cfb4-4df3-90cc-1e8cc5ae1ab8","Type":"ContainerDied","Data":"2e4930cba9674c57b45e14c6ed7f04b9d5a2a19379c9c43d0cd370fc52431d5e"} Sep 30 12:45:03 crc kubenswrapper[5002]: I0930 12:45:03.964848 5002 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2e4930cba9674c57b45e14c6ed7f04b9d5a2a19379c9c43d0cd370fc52431d5e" Sep 30 12:45:03 crc kubenswrapper[5002]: I0930 12:45:03.965007 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29320605-zltsw" Sep 30 12:45:04 crc kubenswrapper[5002]: I0930 12:45:04.022852 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/download-cache-edpm-deployment-openstack-edpm-ipam-lpdr2"] Sep 30 12:45:04 crc kubenswrapper[5002]: E0930 12:45:04.023319 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="016126fa-8541-4424-b217-acf7d88e5680" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam" Sep 30 12:45:04 crc kubenswrapper[5002]: I0930 12:45:04.023334 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="016126fa-8541-4424-b217-acf7d88e5680" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam" Sep 30 12:45:04 crc kubenswrapper[5002]: E0930 12:45:04.023360 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d250b251-cfb4-4df3-90cc-1e8cc5ae1ab8" containerName="collect-profiles" Sep 30 12:45:04 crc kubenswrapper[5002]: I0930 12:45:04.023366 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="d250b251-cfb4-4df3-90cc-1e8cc5ae1ab8" containerName="collect-profiles" Sep 30 12:45:04 crc kubenswrapper[5002]: I0930 12:45:04.023576 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="016126fa-8541-4424-b217-acf7d88e5680" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam" Sep 30 12:45:04 crc kubenswrapper[5002]: I0930 12:45:04.023599 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="d250b251-cfb4-4df3-90cc-1e8cc5ae1ab8" containerName="collect-profiles" Sep 30 12:45:04 crc kubenswrapper[5002]: I0930 12:45:04.024242 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-lpdr2" Sep 30 12:45:04 crc kubenswrapper[5002]: I0930 12:45:04.027119 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 30 12:45:04 crc kubenswrapper[5002]: I0930 12:45:04.027674 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Sep 30 12:45:04 crc kubenswrapper[5002]: I0930 12:45:04.027715 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Sep 30 12:45:04 crc kubenswrapper[5002]: I0930 12:45:04.027811 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-ddhrk" Sep 30 12:45:04 crc kubenswrapper[5002]: I0930 12:45:04.033281 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/download-cache-edpm-deployment-openstack-edpm-ipam-lpdr2"] Sep 30 12:45:04 crc kubenswrapper[5002]: I0930 12:45:04.127505 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5389ead8-63bd-4216-9653-48519fa391fb-inventory\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-lpdr2\" (UID: \"5389ead8-63bd-4216-9653-48519fa391fb\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-lpdr2" Sep 30 12:45:04 crc kubenswrapper[5002]: I0930 12:45:04.127554 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5389ead8-63bd-4216-9653-48519fa391fb-ssh-key\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-lpdr2\" (UID: \"5389ead8-63bd-4216-9653-48519fa391fb\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-lpdr2" Sep 30 12:45:04 crc kubenswrapper[5002]: I0930 12:45:04.127595 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pxb5r\" (UniqueName: \"kubernetes.io/projected/5389ead8-63bd-4216-9653-48519fa391fb-kube-api-access-pxb5r\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-lpdr2\" (UID: \"5389ead8-63bd-4216-9653-48519fa391fb\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-lpdr2" Sep 30 12:45:04 crc kubenswrapper[5002]: I0930 12:45:04.229572 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5389ead8-63bd-4216-9653-48519fa391fb-inventory\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-lpdr2\" (UID: \"5389ead8-63bd-4216-9653-48519fa391fb\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-lpdr2" Sep 30 12:45:04 crc kubenswrapper[5002]: I0930 12:45:04.229630 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5389ead8-63bd-4216-9653-48519fa391fb-ssh-key\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-lpdr2\" (UID: \"5389ead8-63bd-4216-9653-48519fa391fb\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-lpdr2" Sep 30 12:45:04 crc kubenswrapper[5002]: I0930 12:45:04.229684 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pxb5r\" (UniqueName: \"kubernetes.io/projected/5389ead8-63bd-4216-9653-48519fa391fb-kube-api-access-pxb5r\") pod 
\"download-cache-edpm-deployment-openstack-edpm-ipam-lpdr2\" (UID: \"5389ead8-63bd-4216-9653-48519fa391fb\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-lpdr2" Sep 30 12:45:04 crc kubenswrapper[5002]: I0930 12:45:04.234368 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5389ead8-63bd-4216-9653-48519fa391fb-ssh-key\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-lpdr2\" (UID: \"5389ead8-63bd-4216-9653-48519fa391fb\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-lpdr2" Sep 30 12:45:04 crc kubenswrapper[5002]: I0930 12:45:04.235148 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5389ead8-63bd-4216-9653-48519fa391fb-inventory\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-lpdr2\" (UID: \"5389ead8-63bd-4216-9653-48519fa391fb\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-lpdr2" Sep 30 12:45:04 crc kubenswrapper[5002]: I0930 12:45:04.246365 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pxb5r\" (UniqueName: \"kubernetes.io/projected/5389ead8-63bd-4216-9653-48519fa391fb-kube-api-access-pxb5r\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-lpdr2\" (UID: \"5389ead8-63bd-4216-9653-48519fa391fb\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-lpdr2" Sep 30 12:45:04 crc kubenswrapper[5002]: I0930 12:45:04.341555 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-lpdr2" Sep 30 12:45:04 crc kubenswrapper[5002]: I0930 12:45:04.882043 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/download-cache-edpm-deployment-openstack-edpm-ipam-lpdr2"] Sep 30 12:45:04 crc kubenswrapper[5002]: W0930 12:45:04.882626 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5389ead8_63bd_4216_9653_48519fa391fb.slice/crio-01650cb58800fd2e5ac723fbb8cfcc702ae424055fffc6e3d8400aecc755a66f WatchSource:0}: Error finding container 01650cb58800fd2e5ac723fbb8cfcc702ae424055fffc6e3d8400aecc755a66f: Status 404 returned error can't find the container with id 01650cb58800fd2e5ac723fbb8cfcc702ae424055fffc6e3d8400aecc755a66f Sep 30 12:45:04 crc kubenswrapper[5002]: I0930 12:45:04.974321 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-lpdr2" event={"ID":"5389ead8-63bd-4216-9653-48519fa391fb","Type":"ContainerStarted","Data":"01650cb58800fd2e5ac723fbb8cfcc702ae424055fffc6e3d8400aecc755a66f"} Sep 30 12:45:05 crc kubenswrapper[5002]: I0930 12:45:05.988898 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-lpdr2" event={"ID":"5389ead8-63bd-4216-9653-48519fa391fb","Type":"ContainerStarted","Data":"41f047cf48a28167643fdf5462fc8556e7f3c6867ef8c216dd94c0d20b80fcfe"} Sep 30 12:45:06 crc kubenswrapper[5002]: I0930 12:45:06.020597 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-lpdr2" podStartSLOduration=1.455587918 podStartE2EDuration="2.020573438s" podCreationTimestamp="2025-09-30 12:45:04 +0000 UTC" firstStartedPulling="2025-09-30 12:45:04.88542706 +0000 UTC m=+1479.135109206" lastFinishedPulling="2025-09-30 
12:45:05.45041258 +0000 UTC m=+1479.700094726" observedRunningTime="2025-09-30 12:45:06.005956051 +0000 UTC m=+1480.255638227" watchObservedRunningTime="2025-09-30 12:45:06.020573438 +0000 UTC m=+1480.270255604" Sep 30 12:45:06 crc kubenswrapper[5002]: I0930 12:45:06.624797 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-97qdd" Sep 30 12:45:06 crc kubenswrapper[5002]: I0930 12:45:06.624963 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-97qdd" Sep 30 12:45:06 crc kubenswrapper[5002]: I0930 12:45:06.674703 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-97qdd" Sep 30 12:45:07 crc kubenswrapper[5002]: I0930 12:45:07.088770 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-97qdd" Sep 30 12:45:07 crc kubenswrapper[5002]: I0930 12:45:07.149537 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-97qdd"] Sep 30 12:45:09 crc kubenswrapper[5002]: I0930 12:45:09.023335 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-97qdd" podUID="5b7af2e5-f533-4313-870e-be0cd4bc7762" containerName="registry-server" containerID="cri-o://b560a745a429a36a92f6f9a2343fdd59ac00a00bd297ba824fff3f4d8953ccd7" gracePeriod=2 Sep 30 12:45:10 crc kubenswrapper[5002]: I0930 12:45:10.001727 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-97qdd" Sep 30 12:45:10 crc kubenswrapper[5002]: I0930 12:45:10.043707 5002 generic.go:334] "Generic (PLEG): container finished" podID="5b7af2e5-f533-4313-870e-be0cd4bc7762" containerID="b560a745a429a36a92f6f9a2343fdd59ac00a00bd297ba824fff3f4d8953ccd7" exitCode=0 Sep 30 12:45:10 crc kubenswrapper[5002]: I0930 12:45:10.043756 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-97qdd" event={"ID":"5b7af2e5-f533-4313-870e-be0cd4bc7762","Type":"ContainerDied","Data":"b560a745a429a36a92f6f9a2343fdd59ac00a00bd297ba824fff3f4d8953ccd7"} Sep 30 12:45:10 crc kubenswrapper[5002]: I0930 12:45:10.043794 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-97qdd" event={"ID":"5b7af2e5-f533-4313-870e-be0cd4bc7762","Type":"ContainerDied","Data":"a51e82c4224fd89b0cb5361f5c24c1a193b75b29a92c223236b1044d16f67309"} Sep 30 12:45:10 crc kubenswrapper[5002]: I0930 12:45:10.043818 5002 scope.go:117] "RemoveContainer" containerID="b560a745a429a36a92f6f9a2343fdd59ac00a00bd297ba824fff3f4d8953ccd7" Sep 30 12:45:10 crc kubenswrapper[5002]: I0930 12:45:10.043809 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-97qdd" Sep 30 12:45:10 crc kubenswrapper[5002]: I0930 12:45:10.070407 5002 scope.go:117] "RemoveContainer" containerID="81cdcc88351aecc1c9344f3a54551d2db3bb2a773e1ca3c4d4871fa63729ad55" Sep 30 12:45:10 crc kubenswrapper[5002]: I0930 12:45:10.098949 5002 scope.go:117] "RemoveContainer" containerID="79d2a43048a8654f31871f8fadfef8c43c85bcfc8953854d57d5978dea1a1307" Sep 30 12:45:10 crc kubenswrapper[5002]: I0930 12:45:10.137773 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5b7af2e5-f533-4313-870e-be0cd4bc7762-utilities\") pod \"5b7af2e5-f533-4313-870e-be0cd4bc7762\" (UID: \"5b7af2e5-f533-4313-870e-be0cd4bc7762\") " Sep 30 12:45:10 crc kubenswrapper[5002]: I0930 12:45:10.137778 5002 scope.go:117] "RemoveContainer" containerID="b560a745a429a36a92f6f9a2343fdd59ac00a00bd297ba824fff3f4d8953ccd7" Sep 30 12:45:10 crc kubenswrapper[5002]: I0930 12:45:10.137937 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ssbrw\" (UniqueName: \"kubernetes.io/projected/5b7af2e5-f533-4313-870e-be0cd4bc7762-kube-api-access-ssbrw\") pod \"5b7af2e5-f533-4313-870e-be0cd4bc7762\" (UID: \"5b7af2e5-f533-4313-870e-be0cd4bc7762\") " Sep 30 12:45:10 crc kubenswrapper[5002]: I0930 12:45:10.138228 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5b7af2e5-f533-4313-870e-be0cd4bc7762-catalog-content\") pod \"5b7af2e5-f533-4313-870e-be0cd4bc7762\" (UID: \"5b7af2e5-f533-4313-870e-be0cd4bc7762\") " Sep 30 12:45:10 crc kubenswrapper[5002]: I0930 12:45:10.139128 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5b7af2e5-f533-4313-870e-be0cd4bc7762-utilities" (OuterVolumeSpecName: "utilities") pod "5b7af2e5-f533-4313-870e-be0cd4bc7762" (UID: "5b7af2e5-f533-4313-870e-be0cd4bc7762"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 12:45:10 crc kubenswrapper[5002]: I0930 12:45:10.139259 5002 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5b7af2e5-f533-4313-870e-be0cd4bc7762-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 12:45:10 crc kubenswrapper[5002]: E0930 12:45:10.139264 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b560a745a429a36a92f6f9a2343fdd59ac00a00bd297ba824fff3f4d8953ccd7\": container with ID starting with b560a745a429a36a92f6f9a2343fdd59ac00a00bd297ba824fff3f4d8953ccd7 not found: ID does not exist" containerID="b560a745a429a36a92f6f9a2343fdd59ac00a00bd297ba824fff3f4d8953ccd7" Sep 30 12:45:10 crc kubenswrapper[5002]: I0930 12:45:10.139317 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b560a745a429a36a92f6f9a2343fdd59ac00a00bd297ba824fff3f4d8953ccd7"} err="failed to get container status \"b560a745a429a36a92f6f9a2343fdd59ac00a00bd297ba824fff3f4d8953ccd7\": rpc error: code = NotFound desc = could not find container \"b560a745a429a36a92f6f9a2343fdd59ac00a00bd297ba824fff3f4d8953ccd7\": container with ID starting with b560a745a429a36a92f6f9a2343fdd59ac00a00bd297ba824fff3f4d8953ccd7 not found: ID does not exist" Sep 30 12:45:10 crc kubenswrapper[5002]: I0930 12:45:10.139347 5002 scope.go:117] "RemoveContainer" containerID="81cdcc88351aecc1c9344f3a54551d2db3bb2a773e1ca3c4d4871fa63729ad55" Sep 30 12:45:10 crc kubenswrapper[5002]: E0930 12:45:10.139733 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"81cdcc88351aecc1c9344f3a54551d2db3bb2a773e1ca3c4d4871fa63729ad55\": container with ID starting with 81cdcc88351aecc1c9344f3a54551d2db3bb2a773e1ca3c4d4871fa63729ad55 not found: ID does not exist" containerID="81cdcc88351aecc1c9344f3a54551d2db3bb2a773e1ca3c4d4871fa63729ad55" Sep 30 12:45:10 crc kubenswrapper[5002]: I0930 12:45:10.139767 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"81cdcc88351aecc1c9344f3a54551d2db3bb2a773e1ca3c4d4871fa63729ad55"} err="failed to get container status \"81cdcc88351aecc1c9344f3a54551d2db3bb2a773e1ca3c4d4871fa63729ad55\": rpc error: code = NotFound desc = could not find container \"81cdcc88351aecc1c9344f3a54551d2db3bb2a773e1ca3c4d4871fa63729ad55\": container with ID starting with 81cdcc88351aecc1c9344f3a54551d2db3bb2a773e1ca3c4d4871fa63729ad55 not found: ID does not exist" Sep 30 12:45:10 crc kubenswrapper[5002]: I0930 12:45:10.139795 5002 scope.go:117] "RemoveContainer" containerID="79d2a43048a8654f31871f8fadfef8c43c85bcfc8953854d57d5978dea1a1307" Sep 30 12:45:10 crc kubenswrapper[5002]: E0930 12:45:10.140044 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"79d2a43048a8654f31871f8fadfef8c43c85bcfc8953854d57d5978dea1a1307\": container with ID starting with 79d2a43048a8654f31871f8fadfef8c43c85bcfc8953854d57d5978dea1a1307 not found: ID does not exist" containerID="79d2a43048a8654f31871f8fadfef8c43c85bcfc8953854d57d5978dea1a1307" Sep 30 12:45:10 crc kubenswrapper[5002]: I0930 12:45:10.140069 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"79d2a43048a8654f31871f8fadfef8c43c85bcfc8953854d57d5978dea1a1307"} err="failed to get container status 
\"79d2a43048a8654f31871f8fadfef8c43c85bcfc8953854d57d5978dea1a1307\": rpc error: code = NotFound desc = could not find container \"79d2a43048a8654f31871f8fadfef8c43c85bcfc8953854d57d5978dea1a1307\": container with ID starting with 79d2a43048a8654f31871f8fadfef8c43c85bcfc8953854d57d5978dea1a1307 not found: ID does not exist" Sep 30 12:45:10 crc kubenswrapper[5002]: I0930 12:45:10.144169 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5b7af2e5-f533-4313-870e-be0cd4bc7762-kube-api-access-ssbrw" (OuterVolumeSpecName: "kube-api-access-ssbrw") pod "5b7af2e5-f533-4313-870e-be0cd4bc7762" (UID: "5b7af2e5-f533-4313-870e-be0cd4bc7762"). InnerVolumeSpecName "kube-api-access-ssbrw". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 12:45:10 crc kubenswrapper[5002]: I0930 12:45:10.187232 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5b7af2e5-f533-4313-870e-be0cd4bc7762-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5b7af2e5-f533-4313-870e-be0cd4bc7762" (UID: "5b7af2e5-f533-4313-870e-be0cd4bc7762"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 12:45:10 crc kubenswrapper[5002]: I0930 12:45:10.241447 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ssbrw\" (UniqueName: \"kubernetes.io/projected/5b7af2e5-f533-4313-870e-be0cd4bc7762-kube-api-access-ssbrw\") on node \"crc\" DevicePath \"\"" Sep 30 12:45:10 crc kubenswrapper[5002]: I0930 12:45:10.241528 5002 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5b7af2e5-f533-4313-870e-be0cd4bc7762-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 12:45:10 crc kubenswrapper[5002]: I0930 12:45:10.391522 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-97qdd"] Sep 30 12:45:10 crc kubenswrapper[5002]: I0930 12:45:10.400273 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-97qdd"] Sep 30 12:45:10 crc kubenswrapper[5002]: I0930 12:45:10.688872 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5b7af2e5-f533-4313-870e-be0cd4bc7762" path="/var/lib/kubelet/pods/5b7af2e5-f533-4313-870e-be0cd4bc7762/volumes" Sep 30 12:45:14 crc kubenswrapper[5002]: E0930 12:45:14.216377 5002 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod61db55ba_978d_4ac8_8303_d52005542b2c.slice/crio-conmon-1c99e292d9cf689702fbf0a441b7dc9c673def8503789562c5ea4ae07803cdfe.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod61db55ba_978d_4ac8_8303_d52005542b2c.slice/crio-1c99e292d9cf689702fbf0a441b7dc9c673def8503789562c5ea4ae07803cdfe.scope\": RecentStats: unable to find data in memory cache]" Sep 30 12:45:16 crc kubenswrapper[5002]: I0930 12:45:16.689912 5002 scope.go:117] "RemoveContainer" containerID="a913a5b698811d1cde360ed1593c0fd1d395638886860c838d99d6b923c5d677" Sep 30 12:45:16 crc kubenswrapper[5002]: E0930 12:45:16.690772 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-ncbb5_openshift-machine-config-operator(341a55c6-78d3-4fa2-8f47-b56fd41fa1c1)\"" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" podUID="341a55c6-78d3-4fa2-8f47-b56fd41fa1c1" Sep 30 12:45:24 crc kubenswrapper[5002]: E0930 12:45:24.510647 5002 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod61db55ba_978d_4ac8_8303_d52005542b2c.slice/crio-1c99e292d9cf689702fbf0a441b7dc9c673def8503789562c5ea4ae07803cdfe.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod61db55ba_978d_4ac8_8303_d52005542b2c.slice/crio-conmon-1c99e292d9cf689702fbf0a441b7dc9c673def8503789562c5ea4ae07803cdfe.scope\": RecentStats: unable to find data in memory cache]" Sep 30 12:45:28 crc kubenswrapper[5002]: I0930 12:45:28.677519 5002 scope.go:117] "RemoveContainer" containerID="a913a5b698811d1cde360ed1593c0fd1d395638886860c838d99d6b923c5d677" Sep 30 12:45:28 crc kubenswrapper[5002]: E0930 12:45:28.678625 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-ncbb5_openshift-machine-config-operator(341a55c6-78d3-4fa2-8f47-b56fd41fa1c1)\"" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" podUID="341a55c6-78d3-4fa2-8f47-b56fd41fa1c1" Sep 30 12:45:39 crc kubenswrapper[5002]: I0930 12:45:39.676710 5002 scope.go:117] "RemoveContainer" containerID="a913a5b698811d1cde360ed1593c0fd1d395638886860c838d99d6b923c5d677" Sep 30 12:45:39 crc kubenswrapper[5002]: E0930 12:45:39.677575 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-ncbb5_openshift-machine-config-operator(341a55c6-78d3-4fa2-8f47-b56fd41fa1c1)\"" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" podUID="341a55c6-78d3-4fa2-8f47-b56fd41fa1c1" Sep 30 12:45:53 crc kubenswrapper[5002]: I0930 12:45:53.676590 5002 scope.go:117] "RemoveContainer" containerID="a913a5b698811d1cde360ed1593c0fd1d395638886860c838d99d6b923c5d677" Sep 30 12:45:53 crc kubenswrapper[5002]: E0930 12:45:53.677323 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-ncbb5_openshift-machine-config-operator(341a55c6-78d3-4fa2-8f47-b56fd41fa1c1)\"" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" podUID="341a55c6-78d3-4fa2-8f47-b56fd41fa1c1" Sep 30 12:45:59 crc kubenswrapper[5002]: I0930 12:45:59.045551 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-create-6pbv2"] Sep 30 12:45:59 crc kubenswrapper[5002]: I0930 12:45:59.058312 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-create-6pbv2"] Sep 30 12:46:00 crc kubenswrapper[5002]: I0930 12:46:00.708984 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6bc708e3-0747-40b1-a31e-33e23efd53cd" path="/var/lib/kubelet/pods/6bc708e3-0747-40b1-a31e-33e23efd53cd/volumes" Sep 30 12:46:02 crc kubenswrapper[5002]: I0930 12:46:02.036882 5002 kubelet.go:2437] 
"SyncLoop DELETE" source="api" pods=["openstack/placement-db-create-jtdpj"] Sep 30 12:46:02 crc kubenswrapper[5002]: I0930 12:46:02.051250 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-create-gg8d9"] Sep 30 12:46:02 crc kubenswrapper[5002]: I0930 12:46:02.060283 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-create-gg8d9"] Sep 30 12:46:02 crc kubenswrapper[5002]: I0930 12:46:02.069426 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-create-jtdpj"] Sep 30 12:46:02 crc kubenswrapper[5002]: I0930 12:46:02.687295 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a0d9004a-7962-4f71-94bc-703b87eef783" path="/var/lib/kubelet/pods/a0d9004a-7962-4f71-94bc-703b87eef783/volumes" Sep 30 12:46:02 crc kubenswrapper[5002]: I0930 12:46:02.687858 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bed43e1e-c233-45a1-acf8-6f590063d0c0" path="/var/lib/kubelet/pods/bed43e1e-c233-45a1-acf8-6f590063d0c0/volumes" Sep 30 12:46:04 crc kubenswrapper[5002]: I0930 12:46:04.676655 5002 scope.go:117] "RemoveContainer" containerID="a913a5b698811d1cde360ed1593c0fd1d395638886860c838d99d6b923c5d677" Sep 30 12:46:04 crc kubenswrapper[5002]: E0930 12:46:04.677278 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-ncbb5_openshift-machine-config-operator(341a55c6-78d3-4fa2-8f47-b56fd41fa1c1)\"" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" podUID="341a55c6-78d3-4fa2-8f47-b56fd41fa1c1" Sep 30 12:46:12 crc kubenswrapper[5002]: I0930 12:46:12.038645 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-67be-account-create-cl86m"] Sep 30 12:46:12 crc kubenswrapper[5002]: I0930 12:46:12.047895 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-5240-account-create-5zpq6"] Sep 30 12:46:12 crc kubenswrapper[5002]: I0930 12:46:12.055773 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-67be-account-create-cl86m"] Sep 30 12:46:12 crc kubenswrapper[5002]: I0930 12:46:12.062804 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-5240-account-create-5zpq6"] Sep 30 12:46:12 crc kubenswrapper[5002]: I0930 12:46:12.688068 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2ad5b572-2368-4cf6-9fd7-345872b76a49" path="/var/lib/kubelet/pods/2ad5b572-2368-4cf6-9fd7-345872b76a49/volumes" Sep 30 12:46:12 crc kubenswrapper[5002]: I0930 12:46:12.689000 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f5ec1aa9-0c47-4a59-80c4-79e9d278943d" path="/var/lib/kubelet/pods/f5ec1aa9-0c47-4a59-80c4-79e9d278943d/volumes" Sep 30 12:46:17 crc kubenswrapper[5002]: I0930 12:46:17.032026 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-38df-account-create-cr5rj"] Sep 30 12:46:17 crc kubenswrapper[5002]: I0930 12:46:17.040159 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-38df-account-create-cr5rj"] Sep 30 12:46:17 crc kubenswrapper[5002]: I0930 12:46:17.676253 5002 scope.go:117] "RemoveContainer" containerID="a913a5b698811d1cde360ed1593c0fd1d395638886860c838d99d6b923c5d677" Sep 30 12:46:17 crc kubenswrapper[5002]: E0930 12:46:17.676535 5002 pod_workers.go:1301] "Error syncing pod, skipping" 
err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-ncbb5_openshift-machine-config-operator(341a55c6-78d3-4fa2-8f47-b56fd41fa1c1)\"" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" podUID="341a55c6-78d3-4fa2-8f47-b56fd41fa1c1" Sep 30 12:46:18 crc kubenswrapper[5002]: I0930 12:46:18.708788 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="199b9416-df82-435d-b362-1624462e7e79" path="/var/lib/kubelet/pods/199b9416-df82-435d-b362-1624462e7e79/volumes" Sep 30 12:46:28 crc kubenswrapper[5002]: I0930 12:46:28.676827 5002 scope.go:117] "RemoveContainer" containerID="a913a5b698811d1cde360ed1593c0fd1d395638886860c838d99d6b923c5d677" Sep 30 12:46:28 crc kubenswrapper[5002]: E0930 12:46:28.677459 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-ncbb5_openshift-machine-config-operator(341a55c6-78d3-4fa2-8f47-b56fd41fa1c1)\"" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" podUID="341a55c6-78d3-4fa2-8f47-b56fd41fa1c1" Sep 30 12:46:34 crc kubenswrapper[5002]: I0930 12:46:34.890149 5002 generic.go:334] "Generic (PLEG): container finished" podID="5389ead8-63bd-4216-9653-48519fa391fb" containerID="41f047cf48a28167643fdf5462fc8556e7f3c6867ef8c216dd94c0d20b80fcfe" exitCode=0 Sep 30 12:46:34 crc kubenswrapper[5002]: I0930 12:46:34.890310 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-lpdr2" event={"ID":"5389ead8-63bd-4216-9653-48519fa391fb","Type":"ContainerDied","Data":"41f047cf48a28167643fdf5462fc8556e7f3c6867ef8c216dd94c0d20b80fcfe"} Sep 30 12:46:36 crc kubenswrapper[5002]: I0930 12:46:36.040147 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-create-wq6pr"] Sep 30 12:46:36 crc kubenswrapper[5002]: I0930 12:46:36.048576 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-create-lz6gs"] Sep 30 12:46:36 crc kubenswrapper[5002]: I0930 12:46:36.057391 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-create-crf9m"] Sep 30 12:46:36 crc kubenswrapper[5002]: I0930 12:46:36.065922 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-create-crf9m"] Sep 30 12:46:36 crc kubenswrapper[5002]: I0930 12:46:36.073923 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-create-wq6pr"] Sep 30 12:46:36 crc kubenswrapper[5002]: I0930 12:46:36.080834 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-create-lz6gs"] Sep 30 12:46:36 crc kubenswrapper[5002]: I0930 12:46:36.314265 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-lpdr2" Sep 30 12:46:36 crc kubenswrapper[5002]: I0930 12:46:36.441647 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5389ead8-63bd-4216-9653-48519fa391fb-ssh-key\") pod \"5389ead8-63bd-4216-9653-48519fa391fb\" (UID: \"5389ead8-63bd-4216-9653-48519fa391fb\") " Sep 30 12:46:36 crc kubenswrapper[5002]: I0930 12:46:36.441742 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5389ead8-63bd-4216-9653-48519fa391fb-inventory\") pod \"5389ead8-63bd-4216-9653-48519fa391fb\" (UID: \"5389ead8-63bd-4216-9653-48519fa391fb\") " Sep 30 12:46:36 crc kubenswrapper[5002]: I0930 12:46:36.441784 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pxb5r\" (UniqueName: \"kubernetes.io/projected/5389ead8-63bd-4216-9653-48519fa391fb-kube-api-access-pxb5r\") pod \"5389ead8-63bd-4216-9653-48519fa391fb\" (UID: \"5389ead8-63bd-4216-9653-48519fa391fb\") " Sep 30 12:46:36 crc kubenswrapper[5002]: I0930 12:46:36.448605 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5389ead8-63bd-4216-9653-48519fa391fb-kube-api-access-pxb5r" (OuterVolumeSpecName: "kube-api-access-pxb5r") pod "5389ead8-63bd-4216-9653-48519fa391fb" (UID: "5389ead8-63bd-4216-9653-48519fa391fb"). InnerVolumeSpecName "kube-api-access-pxb5r". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 12:46:36 crc kubenswrapper[5002]: I0930 12:46:36.472033 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5389ead8-63bd-4216-9653-48519fa391fb-inventory" (OuterVolumeSpecName: "inventory") pod "5389ead8-63bd-4216-9653-48519fa391fb" (UID: "5389ead8-63bd-4216-9653-48519fa391fb"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:46:36 crc kubenswrapper[5002]: I0930 12:46:36.476393 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5389ead8-63bd-4216-9653-48519fa391fb-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "5389ead8-63bd-4216-9653-48519fa391fb" (UID: "5389ead8-63bd-4216-9653-48519fa391fb"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:46:36 crc kubenswrapper[5002]: I0930 12:46:36.544197 5002 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5389ead8-63bd-4216-9653-48519fa391fb-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 30 12:46:36 crc kubenswrapper[5002]: I0930 12:46:36.544230 5002 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5389ead8-63bd-4216-9653-48519fa391fb-inventory\") on node \"crc\" DevicePath \"\"" Sep 30 12:46:36 crc kubenswrapper[5002]: I0930 12:46:36.544242 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pxb5r\" (UniqueName: \"kubernetes.io/projected/5389ead8-63bd-4216-9653-48519fa391fb-kube-api-access-pxb5r\") on node \"crc\" DevicePath \"\"" Sep 30 12:46:36 crc kubenswrapper[5002]: I0930 12:46:36.686755 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="aeb00f00-434a-44d3-aa16-2ea38921064c" path="/var/lib/kubelet/pods/aeb00f00-434a-44d3-aa16-2ea38921064c/volumes" Sep 30 12:46:36 crc kubenswrapper[5002]: I0930 12:46:36.687283 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b3fe9aed-b73f-410d-a50f-dbbb964f3a8c" path="/var/lib/kubelet/pods/b3fe9aed-b73f-410d-a50f-dbbb964f3a8c/volumes" Sep 30 12:46:36 crc kubenswrapper[5002]: I0930 12:46:36.687878 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d006d9db-f5bd-4a8e-b09e-3befefe10c2a" path="/var/lib/kubelet/pods/d006d9db-f5bd-4a8e-b09e-3befefe10c2a/volumes" Sep 30 12:46:36 crc kubenswrapper[5002]: I0930 12:46:36.911583 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-lpdr2" event={"ID":"5389ead8-63bd-4216-9653-48519fa391fb","Type":"ContainerDied","Data":"01650cb58800fd2e5ac723fbb8cfcc702ae424055fffc6e3d8400aecc755a66f"} Sep 30 12:46:36 crc kubenswrapper[5002]: I0930 12:46:36.911632 5002 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="01650cb58800fd2e5ac723fbb8cfcc702ae424055fffc6e3d8400aecc755a66f" Sep 30 12:46:36 crc kubenswrapper[5002]: I0930 12:46:36.911659 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-lpdr2" Sep 30 12:46:36 crc kubenswrapper[5002]: I0930 12:46:36.985213 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-p69m6"] Sep 30 12:46:36 crc kubenswrapper[5002]: E0930 12:46:36.985621 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5b7af2e5-f533-4313-870e-be0cd4bc7762" containerName="extract-content" Sep 30 12:46:36 crc kubenswrapper[5002]: I0930 12:46:36.985657 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="5b7af2e5-f533-4313-870e-be0cd4bc7762" containerName="extract-content" Sep 30 12:46:36 crc kubenswrapper[5002]: E0930 12:46:36.985674 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5b7af2e5-f533-4313-870e-be0cd4bc7762" containerName="registry-server" Sep 30 12:46:36 crc kubenswrapper[5002]: I0930 12:46:36.985680 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="5b7af2e5-f533-4313-870e-be0cd4bc7762" containerName="registry-server" Sep 30 12:46:36 crc kubenswrapper[5002]: E0930 12:46:36.985696 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5b7af2e5-f533-4313-870e-be0cd4bc7762" containerName="extract-utilities" Sep 30 12:46:36 crc kubenswrapper[5002]: I0930 12:46:36.985703 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="5b7af2e5-f533-4313-870e-be0cd4bc7762" containerName="extract-utilities" Sep 30 12:46:36 crc kubenswrapper[5002]: E0930 12:46:36.985721 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5389ead8-63bd-4216-9653-48519fa391fb" containerName="download-cache-edpm-deployment-openstack-edpm-ipam" Sep 30 12:46:36 crc kubenswrapper[5002]: I0930 12:46:36.985727 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="5389ead8-63bd-4216-9653-48519fa391fb" containerName="download-cache-edpm-deployment-openstack-edpm-ipam" Sep 30 12:46:36 crc kubenswrapper[5002]: I0930 12:46:36.985890 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="5b7af2e5-f533-4313-870e-be0cd4bc7762" containerName="registry-server" Sep 30 12:46:36 crc kubenswrapper[5002]: I0930 12:46:36.985907 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="5389ead8-63bd-4216-9653-48519fa391fb" containerName="download-cache-edpm-deployment-openstack-edpm-ipam" Sep 30 12:46:36 crc kubenswrapper[5002]: I0930 12:46:36.986631 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-p69m6" Sep 30 12:46:36 crc kubenswrapper[5002]: I0930 12:46:36.990329 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Sep 30 12:46:36 crc kubenswrapper[5002]: I0930 12:46:36.990497 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Sep 30 12:46:36 crc kubenswrapper[5002]: I0930 12:46:36.991062 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 30 12:46:36 crc kubenswrapper[5002]: I0930 12:46:36.991261 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-ddhrk" Sep 30 12:46:36 crc kubenswrapper[5002]: I0930 12:46:36.994715 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-p69m6"] Sep 30 12:46:37 crc kubenswrapper[5002]: I0930 12:46:37.154699 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0b171fad-8a7f-4271-b1e6-43b03111244d-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-p69m6\" (UID: \"0b171fad-8a7f-4271-b1e6-43b03111244d\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-p69m6" Sep 30 12:46:37 crc kubenswrapper[5002]: I0930 12:46:37.154885 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vwk9p\" (UniqueName: \"kubernetes.io/projected/0b171fad-8a7f-4271-b1e6-43b03111244d-kube-api-access-vwk9p\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-p69m6\" (UID: \"0b171fad-8a7f-4271-b1e6-43b03111244d\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-p69m6" Sep 30 12:46:37 crc kubenswrapper[5002]: I0930 12:46:37.154927 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/0b171fad-8a7f-4271-b1e6-43b03111244d-ssh-key\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-p69m6\" (UID: \"0b171fad-8a7f-4271-b1e6-43b03111244d\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-p69m6" Sep 30 12:46:37 crc kubenswrapper[5002]: I0930 12:46:37.256784 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0b171fad-8a7f-4271-b1e6-43b03111244d-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-p69m6\" (UID: \"0b171fad-8a7f-4271-b1e6-43b03111244d\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-p69m6" Sep 30 12:46:37 crc kubenswrapper[5002]: I0930 12:46:37.257147 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vwk9p\" (UniqueName: \"kubernetes.io/projected/0b171fad-8a7f-4271-b1e6-43b03111244d-kube-api-access-vwk9p\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-p69m6\" (UID: \"0b171fad-8a7f-4271-b1e6-43b03111244d\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-p69m6" Sep 30 12:46:37 crc kubenswrapper[5002]: I0930 12:46:37.257236 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/0b171fad-8a7f-4271-b1e6-43b03111244d-ssh-key\") 
pod \"configure-network-edpm-deployment-openstack-edpm-ipam-p69m6\" (UID: \"0b171fad-8a7f-4271-b1e6-43b03111244d\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-p69m6" Sep 30 12:46:37 crc kubenswrapper[5002]: I0930 12:46:37.261664 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0b171fad-8a7f-4271-b1e6-43b03111244d-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-p69m6\" (UID: \"0b171fad-8a7f-4271-b1e6-43b03111244d\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-p69m6" Sep 30 12:46:37 crc kubenswrapper[5002]: I0930 12:46:37.261865 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/0b171fad-8a7f-4271-b1e6-43b03111244d-ssh-key\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-p69m6\" (UID: \"0b171fad-8a7f-4271-b1e6-43b03111244d\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-p69m6" Sep 30 12:46:37 crc kubenswrapper[5002]: I0930 12:46:37.271679 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vwk9p\" (UniqueName: \"kubernetes.io/projected/0b171fad-8a7f-4271-b1e6-43b03111244d-kube-api-access-vwk9p\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-p69m6\" (UID: \"0b171fad-8a7f-4271-b1e6-43b03111244d\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-p69m6" Sep 30 12:46:37 crc kubenswrapper[5002]: I0930 12:46:37.313259 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-p69m6" Sep 30 12:46:37 crc kubenswrapper[5002]: I0930 12:46:37.805867 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-p69m6"] Sep 30 12:46:37 crc kubenswrapper[5002]: W0930 12:46:37.808545 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0b171fad_8a7f_4271_b1e6_43b03111244d.slice/crio-53d69441b0fe46988cbcd1ec36ced60891ba9822ef1ca40578351174c736cb20 WatchSource:0}: Error finding container 53d69441b0fe46988cbcd1ec36ced60891ba9822ef1ca40578351174c736cb20: Status 404 returned error can't find the container with id 53d69441b0fe46988cbcd1ec36ced60891ba9822ef1ca40578351174c736cb20 Sep 30 12:46:37 crc kubenswrapper[5002]: I0930 12:46:37.811821 5002 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Sep 30 12:46:37 crc kubenswrapper[5002]: I0930 12:46:37.921030 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-p69m6" event={"ID":"0b171fad-8a7f-4271-b1e6-43b03111244d","Type":"ContainerStarted","Data":"53d69441b0fe46988cbcd1ec36ced60891ba9822ef1ca40578351174c736cb20"} Sep 30 12:46:38 crc kubenswrapper[5002]: I0930 12:46:38.930359 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-p69m6" event={"ID":"0b171fad-8a7f-4271-b1e6-43b03111244d","Type":"ContainerStarted","Data":"ff0bf4ee83f178fa2d5a452f3a8dd4b4bc9e1f4a537be11692721fbf9d5c4770"} Sep 30 12:46:38 crc kubenswrapper[5002]: I0930 12:46:38.951752 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-p69m6" 
podStartSLOduration=2.451303681 podStartE2EDuration="2.951733478s" podCreationTimestamp="2025-09-30 12:46:36 +0000 UTC" firstStartedPulling="2025-09-30 12:46:37.811531031 +0000 UTC m=+1572.061213177" lastFinishedPulling="2025-09-30 12:46:38.311960828 +0000 UTC m=+1572.561642974" observedRunningTime="2025-09-30 12:46:38.945869237 +0000 UTC m=+1573.195551403" watchObservedRunningTime="2025-09-30 12:46:38.951733478 +0000 UTC m=+1573.201415624" Sep 30 12:46:40 crc kubenswrapper[5002]: I0930 12:46:40.031127 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-sync-cpvxh"] Sep 30 12:46:40 crc kubenswrapper[5002]: I0930 12:46:40.038127 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-sync-cpvxh"] Sep 30 12:46:40 crc kubenswrapper[5002]: I0930 12:46:40.689867 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="76838250-6394-4471-8009-e204115dc84c" path="/var/lib/kubelet/pods/76838250-6394-4471-8009-e204115dc84c/volumes" Sep 30 12:46:41 crc kubenswrapper[5002]: I0930 12:46:41.030984 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-sync-2xqnk"] Sep 30 12:46:41 crc kubenswrapper[5002]: I0930 12:46:41.036729 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-sync-2xqnk"] Sep 30 12:46:41 crc kubenswrapper[5002]: I0930 12:46:41.166806 5002 scope.go:117] "RemoveContainer" containerID="4090f4eb041f661c6d248abe0600dc8051b07f9fa8e3d353c4686ba756a60352" Sep 30 12:46:41 crc kubenswrapper[5002]: I0930 12:46:41.187185 5002 scope.go:117] "RemoveContainer" containerID="461ba45de84cc66f64757100ba8e07901c75bfa41cac76eefdcc7d5b972e721b" Sep 30 12:46:41 crc kubenswrapper[5002]: I0930 12:46:41.229391 5002 scope.go:117] "RemoveContainer" containerID="b0de66aa455981cebaaf74f42189600b23c5ea003e81de2b461ebb6c70c5896c" Sep 30 12:46:41 crc kubenswrapper[5002]: I0930 12:46:41.271978 5002 scope.go:117] "RemoveContainer" containerID="139979365d4cc97837ff1cac7738f921613c6b24c1f221a8ba1f145ef3dc3c16" Sep 30 12:46:41 crc kubenswrapper[5002]: I0930 12:46:41.317691 5002 scope.go:117] "RemoveContainer" containerID="c62def32dc5b26d5fce6bf5e1d5c485cde81cbb6939baf1fee77af7a4c03cc63" Sep 30 12:46:41 crc kubenswrapper[5002]: I0930 12:46:41.386569 5002 scope.go:117] "RemoveContainer" containerID="10bc1990208a4da672704ee040a99cee3e574636e52d778178741af4c257aeab" Sep 30 12:46:41 crc kubenswrapper[5002]: I0930 12:46:41.408840 5002 scope.go:117] "RemoveContainer" containerID="0a2e3563a6f51d6fc971cee3bbf4a53e725a1c9b662e1c1557fc4ced36606150" Sep 30 12:46:41 crc kubenswrapper[5002]: I0930 12:46:41.429158 5002 scope.go:117] "RemoveContainer" containerID="bc58330b98b69f3fe76c78e4c9bb9f5b212dd7f5f18d1d02b9b7a4234d078dd5" Sep 30 12:46:41 crc kubenswrapper[5002]: I0930 12:46:41.448934 5002 scope.go:117] "RemoveContainer" containerID="db30d89e04cb503d687ee295ed1aeacb7254c3e37f0e6cb897eb21d57b79825c" Sep 30 12:46:41 crc kubenswrapper[5002]: I0930 12:46:41.465948 5002 scope.go:117] "RemoveContainer" containerID="137c9b6f15085a54fb7618646ccbc40198a92fa016da7d965d71e457a9755476" Sep 30 12:46:42 crc kubenswrapper[5002]: I0930 12:46:42.688391 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e2a8ad8b-59e6-407f-b634-38dc40374764" path="/var/lib/kubelet/pods/e2a8ad8b-59e6-407f-b634-38dc40374764/volumes" Sep 30 12:46:43 crc kubenswrapper[5002]: I0930 12:46:43.677017 5002 scope.go:117] "RemoveContainer" containerID="a913a5b698811d1cde360ed1593c0fd1d395638886860c838d99d6b923c5d677" Sep 30 
12:46:43 crc kubenswrapper[5002]: E0930 12:46:43.677515 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-ncbb5_openshift-machine-config-operator(341a55c6-78d3-4fa2-8f47-b56fd41fa1c1)\"" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" podUID="341a55c6-78d3-4fa2-8f47-b56fd41fa1c1" Sep 30 12:46:55 crc kubenswrapper[5002]: I0930 12:46:55.677048 5002 scope.go:117] "RemoveContainer" containerID="a913a5b698811d1cde360ed1593c0fd1d395638886860c838d99d6b923c5d677" Sep 30 12:46:55 crc kubenswrapper[5002]: E0930 12:46:55.678848 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-ncbb5_openshift-machine-config-operator(341a55c6-78d3-4fa2-8f47-b56fd41fa1c1)\"" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" podUID="341a55c6-78d3-4fa2-8f47-b56fd41fa1c1" Sep 30 12:47:07 crc kubenswrapper[5002]: I0930 12:47:07.676849 5002 scope.go:117] "RemoveContainer" containerID="a913a5b698811d1cde360ed1593c0fd1d395638886860c838d99d6b923c5d677" Sep 30 12:47:07 crc kubenswrapper[5002]: E0930 12:47:07.677532 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-ncbb5_openshift-machine-config-operator(341a55c6-78d3-4fa2-8f47-b56fd41fa1c1)\"" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" podUID="341a55c6-78d3-4fa2-8f47-b56fd41fa1c1" Sep 30 12:47:09 crc kubenswrapper[5002]: I0930 12:47:09.037068 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-c8b1-account-create-tkp6v"] Sep 30 12:47:09 crc kubenswrapper[5002]: I0930 12:47:09.045113 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-c8b1-account-create-tkp6v"] Sep 30 12:47:10 crc kubenswrapper[5002]: I0930 12:47:10.021534 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-0907-account-create-r5dxh"] Sep 30 12:47:10 crc kubenswrapper[5002]: I0930 12:47:10.028079 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-dcab-account-create-mxdfk"] Sep 30 12:47:10 crc kubenswrapper[5002]: I0930 12:47:10.035897 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-0907-account-create-r5dxh"] Sep 30 12:47:10 crc kubenswrapper[5002]: I0930 12:47:10.043089 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-dcab-account-create-mxdfk"] Sep 30 12:47:10 crc kubenswrapper[5002]: I0930 12:47:10.696072 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="153e0644-8b78-4c21-9377-a0d4c7cf0848" path="/var/lib/kubelet/pods/153e0644-8b78-4c21-9377-a0d4c7cf0848/volumes" Sep 30 12:47:10 crc kubenswrapper[5002]: I0930 12:47:10.697264 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="397fcd72-2dc0-4de0-9a5d-69fc3b14640a" path="/var/lib/kubelet/pods/397fcd72-2dc0-4de0-9a5d-69fc3b14640a/volumes" Sep 30 12:47:10 crc kubenswrapper[5002]: I0930 12:47:10.698136 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="67948d65-c149-45be-914e-30dc00325da6" 
path="/var/lib/kubelet/pods/67948d65-c149-45be-914e-30dc00325da6/volumes" Sep 30 12:47:13 crc kubenswrapper[5002]: I0930 12:47:13.034173 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-88rff"] Sep 30 12:47:13 crc kubenswrapper[5002]: I0930 12:47:13.066824 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-88rff"] Sep 30 12:47:14 crc kubenswrapper[5002]: I0930 12:47:14.688222 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="418c45be-fdf8-41be-899d-a75e1ab0acef" path="/var/lib/kubelet/pods/418c45be-fdf8-41be-899d-a75e1ab0acef/volumes" Sep 30 12:47:18 crc kubenswrapper[5002]: I0930 12:47:18.676332 5002 scope.go:117] "RemoveContainer" containerID="a913a5b698811d1cde360ed1593c0fd1d395638886860c838d99d6b923c5d677" Sep 30 12:47:18 crc kubenswrapper[5002]: E0930 12:47:18.677571 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-ncbb5_openshift-machine-config-operator(341a55c6-78d3-4fa2-8f47-b56fd41fa1c1)\"" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" podUID="341a55c6-78d3-4fa2-8f47-b56fd41fa1c1" Sep 30 12:47:30 crc kubenswrapper[5002]: I0930 12:47:30.676152 5002 scope.go:117] "RemoveContainer" containerID="a913a5b698811d1cde360ed1593c0fd1d395638886860c838d99d6b923c5d677" Sep 30 12:47:30 crc kubenswrapper[5002]: E0930 12:47:30.677718 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-ncbb5_openshift-machine-config-operator(341a55c6-78d3-4fa2-8f47-b56fd41fa1c1)\"" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" podUID="341a55c6-78d3-4fa2-8f47-b56fd41fa1c1" Sep 30 12:47:41 crc kubenswrapper[5002]: I0930 12:47:41.641828 5002 scope.go:117] "RemoveContainer" containerID="0236891218dc97d1b4becd90b51dfe0651b3e5525f5ae116726ca521324018c9" Sep 30 12:47:41 crc kubenswrapper[5002]: I0930 12:47:41.666436 5002 scope.go:117] "RemoveContainer" containerID="a782f97e1fa15c29109a4ce3a8bfd14304f0c32d00cdee3a86f5283b51fefa0e" Sep 30 12:47:41 crc kubenswrapper[5002]: I0930 12:47:41.738143 5002 scope.go:117] "RemoveContainer" containerID="c1a881029eb753f5bb8fdb3b059612db62c1dec044feab98e868aa941e59a615" Sep 30 12:47:41 crc kubenswrapper[5002]: I0930 12:47:41.796267 5002 scope.go:117] "RemoveContainer" containerID="57ebfa51022e322f9dd7641936908a30044d5cc2bbf5d1c6b50d263f0dcab1f9" Sep 30 12:47:41 crc kubenswrapper[5002]: I0930 12:47:41.819187 5002 scope.go:117] "RemoveContainer" containerID="fa9eea698781ec5b9c457e406dd33da4c5fdd3ca7f67169866bf33c25f461b3b" Sep 30 12:47:42 crc kubenswrapper[5002]: I0930 12:47:42.676878 5002 scope.go:117] "RemoveContainer" containerID="a913a5b698811d1cde360ed1593c0fd1d395638886860c838d99d6b923c5d677" Sep 30 12:47:42 crc kubenswrapper[5002]: E0930 12:47:42.677424 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-ncbb5_openshift-machine-config-operator(341a55c6-78d3-4fa2-8f47-b56fd41fa1c1)\"" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" podUID="341a55c6-78d3-4fa2-8f47-b56fd41fa1c1" 
Sep 30 12:47:46 crc kubenswrapper[5002]: I0930 12:47:46.515430 5002 generic.go:334] "Generic (PLEG): container finished" podID="0b171fad-8a7f-4271-b1e6-43b03111244d" containerID="ff0bf4ee83f178fa2d5a452f3a8dd4b4bc9e1f4a537be11692721fbf9d5c4770" exitCode=0 Sep 30 12:47:46 crc kubenswrapper[5002]: I0930 12:47:46.515565 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-p69m6" event={"ID":"0b171fad-8a7f-4271-b1e6-43b03111244d","Type":"ContainerDied","Data":"ff0bf4ee83f178fa2d5a452f3a8dd4b4bc9e1f4a537be11692721fbf9d5c4770"} Sep 30 12:47:47 crc kubenswrapper[5002]: I0930 12:47:47.908271 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-p69m6" Sep 30 12:47:47 crc kubenswrapper[5002]: I0930 12:47:47.987038 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vwk9p\" (UniqueName: \"kubernetes.io/projected/0b171fad-8a7f-4271-b1e6-43b03111244d-kube-api-access-vwk9p\") pod \"0b171fad-8a7f-4271-b1e6-43b03111244d\" (UID: \"0b171fad-8a7f-4271-b1e6-43b03111244d\") " Sep 30 12:47:47 crc kubenswrapper[5002]: I0930 12:47:47.987175 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0b171fad-8a7f-4271-b1e6-43b03111244d-inventory\") pod \"0b171fad-8a7f-4271-b1e6-43b03111244d\" (UID: \"0b171fad-8a7f-4271-b1e6-43b03111244d\") " Sep 30 12:47:47 crc kubenswrapper[5002]: I0930 12:47:47.987305 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/0b171fad-8a7f-4271-b1e6-43b03111244d-ssh-key\") pod \"0b171fad-8a7f-4271-b1e6-43b03111244d\" (UID: \"0b171fad-8a7f-4271-b1e6-43b03111244d\") " Sep 30 12:47:47 crc kubenswrapper[5002]: I0930 12:47:47.993517 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b171fad-8a7f-4271-b1e6-43b03111244d-kube-api-access-vwk9p" (OuterVolumeSpecName: "kube-api-access-vwk9p") pod "0b171fad-8a7f-4271-b1e6-43b03111244d" (UID: "0b171fad-8a7f-4271-b1e6-43b03111244d"). InnerVolumeSpecName "kube-api-access-vwk9p". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 12:47:48 crc kubenswrapper[5002]: I0930 12:47:48.016322 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b171fad-8a7f-4271-b1e6-43b03111244d-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "0b171fad-8a7f-4271-b1e6-43b03111244d" (UID: "0b171fad-8a7f-4271-b1e6-43b03111244d"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:47:48 crc kubenswrapper[5002]: I0930 12:47:48.023559 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b171fad-8a7f-4271-b1e6-43b03111244d-inventory" (OuterVolumeSpecName: "inventory") pod "0b171fad-8a7f-4271-b1e6-43b03111244d" (UID: "0b171fad-8a7f-4271-b1e6-43b03111244d"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:47:48 crc kubenswrapper[5002]: I0930 12:47:48.096257 5002 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/0b171fad-8a7f-4271-b1e6-43b03111244d-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 30 12:47:48 crc kubenswrapper[5002]: I0930 12:47:48.096337 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vwk9p\" (UniqueName: \"kubernetes.io/projected/0b171fad-8a7f-4271-b1e6-43b03111244d-kube-api-access-vwk9p\") on node \"crc\" DevicePath \"\"" Sep 30 12:47:48 crc kubenswrapper[5002]: I0930 12:47:48.096360 5002 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0b171fad-8a7f-4271-b1e6-43b03111244d-inventory\") on node \"crc\" DevicePath \"\"" Sep 30 12:47:48 crc kubenswrapper[5002]: I0930 12:47:48.536031 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-p69m6" event={"ID":"0b171fad-8a7f-4271-b1e6-43b03111244d","Type":"ContainerDied","Data":"53d69441b0fe46988cbcd1ec36ced60891ba9822ef1ca40578351174c736cb20"} Sep 30 12:47:48 crc kubenswrapper[5002]: I0930 12:47:48.536088 5002 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="53d69441b0fe46988cbcd1ec36ced60891ba9822ef1ca40578351174c736cb20" Sep 30 12:47:48 crc kubenswrapper[5002]: I0930 12:47:48.536075 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-p69m6" Sep 30 12:47:48 crc kubenswrapper[5002]: I0930 12:47:48.625063 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-64qkx"] Sep 30 12:47:48 crc kubenswrapper[5002]: E0930 12:47:48.625690 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0b171fad-8a7f-4271-b1e6-43b03111244d" containerName="configure-network-edpm-deployment-openstack-edpm-ipam" Sep 30 12:47:48 crc kubenswrapper[5002]: I0930 12:47:48.625706 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="0b171fad-8a7f-4271-b1e6-43b03111244d" containerName="configure-network-edpm-deployment-openstack-edpm-ipam" Sep 30 12:47:48 crc kubenswrapper[5002]: I0930 12:47:48.625891 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="0b171fad-8a7f-4271-b1e6-43b03111244d" containerName="configure-network-edpm-deployment-openstack-edpm-ipam" Sep 30 12:47:48 crc kubenswrapper[5002]: I0930 12:47:48.626493 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-64qkx" Sep 30 12:47:48 crc kubenswrapper[5002]: I0930 12:47:48.629926 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Sep 30 12:47:48 crc kubenswrapper[5002]: I0930 12:47:48.629943 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 30 12:47:48 crc kubenswrapper[5002]: I0930 12:47:48.630106 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Sep 30 12:47:48 crc kubenswrapper[5002]: I0930 12:47:48.630227 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-ddhrk" Sep 30 12:47:48 crc kubenswrapper[5002]: I0930 12:47:48.637504 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-64qkx"] Sep 30 12:47:48 crc kubenswrapper[5002]: I0930 12:47:48.706701 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a049d73d-c168-40c1-a943-9df4f221879a-ssh-key\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-64qkx\" (UID: \"a049d73d-c168-40c1-a943-9df4f221879a\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-64qkx" Sep 30 12:47:48 crc kubenswrapper[5002]: I0930 12:47:48.706790 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dr78r\" (UniqueName: \"kubernetes.io/projected/a049d73d-c168-40c1-a943-9df4f221879a-kube-api-access-dr78r\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-64qkx\" (UID: \"a049d73d-c168-40c1-a943-9df4f221879a\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-64qkx" Sep 30 12:47:48 crc kubenswrapper[5002]: I0930 12:47:48.707025 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a049d73d-c168-40c1-a943-9df4f221879a-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-64qkx\" (UID: \"a049d73d-c168-40c1-a943-9df4f221879a\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-64qkx" Sep 30 12:47:48 crc kubenswrapper[5002]: I0930 12:47:48.808435 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a049d73d-c168-40c1-a943-9df4f221879a-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-64qkx\" (UID: \"a049d73d-c168-40c1-a943-9df4f221879a\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-64qkx" Sep 30 12:47:48 crc kubenswrapper[5002]: I0930 12:47:48.808530 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a049d73d-c168-40c1-a943-9df4f221879a-ssh-key\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-64qkx\" (UID: \"a049d73d-c168-40c1-a943-9df4f221879a\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-64qkx" Sep 30 12:47:48 crc kubenswrapper[5002]: I0930 12:47:48.808605 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dr78r\" (UniqueName: \"kubernetes.io/projected/a049d73d-c168-40c1-a943-9df4f221879a-kube-api-access-dr78r\") pod 
\"validate-network-edpm-deployment-openstack-edpm-ipam-64qkx\" (UID: \"a049d73d-c168-40c1-a943-9df4f221879a\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-64qkx" Sep 30 12:47:48 crc kubenswrapper[5002]: I0930 12:47:48.813901 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a049d73d-c168-40c1-a943-9df4f221879a-ssh-key\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-64qkx\" (UID: \"a049d73d-c168-40c1-a943-9df4f221879a\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-64qkx" Sep 30 12:47:48 crc kubenswrapper[5002]: I0930 12:47:48.814375 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a049d73d-c168-40c1-a943-9df4f221879a-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-64qkx\" (UID: \"a049d73d-c168-40c1-a943-9df4f221879a\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-64qkx" Sep 30 12:47:48 crc kubenswrapper[5002]: I0930 12:47:48.826691 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dr78r\" (UniqueName: \"kubernetes.io/projected/a049d73d-c168-40c1-a943-9df4f221879a-kube-api-access-dr78r\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-64qkx\" (UID: \"a049d73d-c168-40c1-a943-9df4f221879a\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-64qkx" Sep 30 12:47:48 crc kubenswrapper[5002]: I0930 12:47:48.946258 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-64qkx" Sep 30 12:47:49 crc kubenswrapper[5002]: I0930 12:47:49.044513 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-sync-mvczr"] Sep 30 12:47:49 crc kubenswrapper[5002]: I0930 12:47:49.063550 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-sync-mvczr"] Sep 30 12:47:49 crc kubenswrapper[5002]: I0930 12:47:49.496884 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-64qkx"] Sep 30 12:47:49 crc kubenswrapper[5002]: I0930 12:47:49.544983 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-64qkx" event={"ID":"a049d73d-c168-40c1-a943-9df4f221879a","Type":"ContainerStarted","Data":"691dac7bf9d9355b8d30fa8da2f463564bda07ae228a4525facc540dea7cdd2b"} Sep 30 12:47:50 crc kubenswrapper[5002]: I0930 12:47:50.687505 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8687f1f5-15da-479c-8661-948b437fcb33" path="/var/lib/kubelet/pods/8687f1f5-15da-479c-8661-948b437fcb33/volumes" Sep 30 12:47:51 crc kubenswrapper[5002]: I0930 12:47:51.041669 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-sync-qwvj9"] Sep 30 12:47:51 crc kubenswrapper[5002]: I0930 12:47:51.051892 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-sync-qwvj9"] Sep 30 12:47:51 crc kubenswrapper[5002]: I0930 12:47:51.566847 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-64qkx" event={"ID":"a049d73d-c168-40c1-a943-9df4f221879a","Type":"ContainerStarted","Data":"268ce2a2ae4ac6db4a21a84bfdf95672f677cdb444e59b38d8d3003c187bd24f"} Sep 30 12:47:51 crc kubenswrapper[5002]: I0930 12:47:51.586455 5002 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-64qkx" podStartSLOduration=2.54686269 podStartE2EDuration="3.58643032s" podCreationTimestamp="2025-09-30 12:47:48 +0000 UTC" firstStartedPulling="2025-09-30 12:47:49.501848778 +0000 UTC m=+1643.751530914" lastFinishedPulling="2025-09-30 12:47:50.541416398 +0000 UTC m=+1644.791098544" observedRunningTime="2025-09-30 12:47:51.583990967 +0000 UTC m=+1645.833673123" watchObservedRunningTime="2025-09-30 12:47:51.58643032 +0000 UTC m=+1645.836112466" Sep 30 12:47:52 crc kubenswrapper[5002]: I0930 12:47:52.686901 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d52650cc-2190-4b6f-8a3f-f506262075d8" path="/var/lib/kubelet/pods/d52650cc-2190-4b6f-8a3f-f506262075d8/volumes" Sep 30 12:47:55 crc kubenswrapper[5002]: I0930 12:47:55.612031 5002 generic.go:334] "Generic (PLEG): container finished" podID="a049d73d-c168-40c1-a943-9df4f221879a" containerID="268ce2a2ae4ac6db4a21a84bfdf95672f677cdb444e59b38d8d3003c187bd24f" exitCode=0 Sep 30 12:47:55 crc kubenswrapper[5002]: I0930 12:47:55.612145 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-64qkx" event={"ID":"a049d73d-c168-40c1-a943-9df4f221879a","Type":"ContainerDied","Data":"268ce2a2ae4ac6db4a21a84bfdf95672f677cdb444e59b38d8d3003c187bd24f"} Sep 30 12:47:57 crc kubenswrapper[5002]: I0930 12:47:57.019620 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-64qkx" Sep 30 12:47:57 crc kubenswrapper[5002]: I0930 12:47:57.179156 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a049d73d-c168-40c1-a943-9df4f221879a-inventory\") pod \"a049d73d-c168-40c1-a943-9df4f221879a\" (UID: \"a049d73d-c168-40c1-a943-9df4f221879a\") " Sep 30 12:47:57 crc kubenswrapper[5002]: I0930 12:47:57.179275 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dr78r\" (UniqueName: \"kubernetes.io/projected/a049d73d-c168-40c1-a943-9df4f221879a-kube-api-access-dr78r\") pod \"a049d73d-c168-40c1-a943-9df4f221879a\" (UID: \"a049d73d-c168-40c1-a943-9df4f221879a\") " Sep 30 12:47:57 crc kubenswrapper[5002]: I0930 12:47:57.179291 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a049d73d-c168-40c1-a943-9df4f221879a-ssh-key\") pod \"a049d73d-c168-40c1-a943-9df4f221879a\" (UID: \"a049d73d-c168-40c1-a943-9df4f221879a\") " Sep 30 12:47:57 crc kubenswrapper[5002]: I0930 12:47:57.185055 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a049d73d-c168-40c1-a943-9df4f221879a-kube-api-access-dr78r" (OuterVolumeSpecName: "kube-api-access-dr78r") pod "a049d73d-c168-40c1-a943-9df4f221879a" (UID: "a049d73d-c168-40c1-a943-9df4f221879a"). InnerVolumeSpecName "kube-api-access-dr78r". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 12:47:57 crc kubenswrapper[5002]: I0930 12:47:57.207014 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a049d73d-c168-40c1-a943-9df4f221879a-inventory" (OuterVolumeSpecName: "inventory") pod "a049d73d-c168-40c1-a943-9df4f221879a" (UID: "a049d73d-c168-40c1-a943-9df4f221879a"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:47:57 crc kubenswrapper[5002]: I0930 12:47:57.209175 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a049d73d-c168-40c1-a943-9df4f221879a-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "a049d73d-c168-40c1-a943-9df4f221879a" (UID: "a049d73d-c168-40c1-a943-9df4f221879a"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:47:57 crc kubenswrapper[5002]: I0930 12:47:57.281065 5002 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a049d73d-c168-40c1-a943-9df4f221879a-inventory\") on node \"crc\" DevicePath \"\"" Sep 30 12:47:57 crc kubenswrapper[5002]: I0930 12:47:57.281109 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dr78r\" (UniqueName: \"kubernetes.io/projected/a049d73d-c168-40c1-a943-9df4f221879a-kube-api-access-dr78r\") on node \"crc\" DevicePath \"\"" Sep 30 12:47:57 crc kubenswrapper[5002]: I0930 12:47:57.281124 5002 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a049d73d-c168-40c1-a943-9df4f221879a-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 30 12:47:57 crc kubenswrapper[5002]: I0930 12:47:57.631553 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-64qkx" event={"ID":"a049d73d-c168-40c1-a943-9df4f221879a","Type":"ContainerDied","Data":"691dac7bf9d9355b8d30fa8da2f463564bda07ae228a4525facc540dea7cdd2b"} Sep 30 12:47:57 crc kubenswrapper[5002]: I0930 12:47:57.631869 5002 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="691dac7bf9d9355b8d30fa8da2f463564bda07ae228a4525facc540dea7cdd2b" Sep 30 12:47:57 crc kubenswrapper[5002]: I0930 12:47:57.631614 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-64qkx" Sep 30 12:47:57 crc kubenswrapper[5002]: I0930 12:47:57.675946 5002 scope.go:117] "RemoveContainer" containerID="a913a5b698811d1cde360ed1593c0fd1d395638886860c838d99d6b923c5d677" Sep 30 12:47:57 crc kubenswrapper[5002]: E0930 12:47:57.676207 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-ncbb5_openshift-machine-config-operator(341a55c6-78d3-4fa2-8f47-b56fd41fa1c1)\"" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" podUID="341a55c6-78d3-4fa2-8f47-b56fd41fa1c1" Sep 30 12:47:57 crc kubenswrapper[5002]: I0930 12:47:57.705334 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-dpzsf"] Sep 30 12:47:57 crc kubenswrapper[5002]: E0930 12:47:57.705750 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a049d73d-c168-40c1-a943-9df4f221879a" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Sep 30 12:47:57 crc kubenswrapper[5002]: I0930 12:47:57.705768 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="a049d73d-c168-40c1-a943-9df4f221879a" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Sep 30 12:47:57 crc kubenswrapper[5002]: I0930 12:47:57.705957 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="a049d73d-c168-40c1-a943-9df4f221879a" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Sep 30 12:47:57 crc kubenswrapper[5002]: I0930 12:47:57.706610 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-dpzsf" Sep 30 12:47:57 crc kubenswrapper[5002]: I0930 12:47:57.710911 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Sep 30 12:47:57 crc kubenswrapper[5002]: I0930 12:47:57.711102 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 30 12:47:57 crc kubenswrapper[5002]: I0930 12:47:57.711836 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Sep 30 12:47:57 crc kubenswrapper[5002]: I0930 12:47:57.712209 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-ddhrk" Sep 30 12:47:57 crc kubenswrapper[5002]: I0930 12:47:57.724762 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-dpzsf"] Sep 30 12:47:57 crc kubenswrapper[5002]: I0930 12:47:57.792112 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ktfxw\" (UniqueName: \"kubernetes.io/projected/8ebc7091-7fed-4943-9bff-8d1d9ab3db90-kube-api-access-ktfxw\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-dpzsf\" (UID: \"8ebc7091-7fed-4943-9bff-8d1d9ab3db90\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-dpzsf" Sep 30 12:47:57 crc kubenswrapper[5002]: I0930 12:47:57.792210 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8ebc7091-7fed-4943-9bff-8d1d9ab3db90-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-dpzsf\" (UID: \"8ebc7091-7fed-4943-9bff-8d1d9ab3db90\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-dpzsf" Sep 30 12:47:57 crc kubenswrapper[5002]: I0930 12:47:57.792796 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8ebc7091-7fed-4943-9bff-8d1d9ab3db90-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-dpzsf\" (UID: \"8ebc7091-7fed-4943-9bff-8d1d9ab3db90\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-dpzsf" Sep 30 12:47:57 crc kubenswrapper[5002]: I0930 12:47:57.895437 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ktfxw\" (UniqueName: \"kubernetes.io/projected/8ebc7091-7fed-4943-9bff-8d1d9ab3db90-kube-api-access-ktfxw\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-dpzsf\" (UID: \"8ebc7091-7fed-4943-9bff-8d1d9ab3db90\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-dpzsf" Sep 30 12:47:57 crc kubenswrapper[5002]: I0930 12:47:57.895583 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8ebc7091-7fed-4943-9bff-8d1d9ab3db90-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-dpzsf\" (UID: \"8ebc7091-7fed-4943-9bff-8d1d9ab3db90\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-dpzsf" Sep 30 12:47:57 crc kubenswrapper[5002]: I0930 12:47:57.895686 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8ebc7091-7fed-4943-9bff-8d1d9ab3db90-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-dpzsf\" (UID: 
\"8ebc7091-7fed-4943-9bff-8d1d9ab3db90\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-dpzsf" Sep 30 12:47:57 crc kubenswrapper[5002]: I0930 12:47:57.901699 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8ebc7091-7fed-4943-9bff-8d1d9ab3db90-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-dpzsf\" (UID: \"8ebc7091-7fed-4943-9bff-8d1d9ab3db90\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-dpzsf" Sep 30 12:47:57 crc kubenswrapper[5002]: I0930 12:47:57.902103 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8ebc7091-7fed-4943-9bff-8d1d9ab3db90-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-dpzsf\" (UID: \"8ebc7091-7fed-4943-9bff-8d1d9ab3db90\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-dpzsf" Sep 30 12:47:57 crc kubenswrapper[5002]: I0930 12:47:57.914823 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ktfxw\" (UniqueName: \"kubernetes.io/projected/8ebc7091-7fed-4943-9bff-8d1d9ab3db90-kube-api-access-ktfxw\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-dpzsf\" (UID: \"8ebc7091-7fed-4943-9bff-8d1d9ab3db90\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-dpzsf" Sep 30 12:47:58 crc kubenswrapper[5002]: I0930 12:47:58.023249 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-dpzsf" Sep 30 12:47:58 crc kubenswrapper[5002]: I0930 12:47:58.542905 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-dpzsf"] Sep 30 12:47:58 crc kubenswrapper[5002]: I0930 12:47:58.641856 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-dpzsf" event={"ID":"8ebc7091-7fed-4943-9bff-8d1d9ab3db90","Type":"ContainerStarted","Data":"9a36da511eac3a0f9caa7962df296f9a727f19ecb68e430912b6bb16cbf93e46"} Sep 30 12:47:59 crc kubenswrapper[5002]: I0930 12:47:59.655582 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-dpzsf" event={"ID":"8ebc7091-7fed-4943-9bff-8d1d9ab3db90","Type":"ContainerStarted","Data":"69e4342b65580958aefe090959eab21ec49395c41b6cca593d14b4dce89f2217"} Sep 30 12:47:59 crc kubenswrapper[5002]: I0930 12:47:59.680460 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-dpzsf" podStartSLOduration=2.056212361 podStartE2EDuration="2.680438129s" podCreationTimestamp="2025-09-30 12:47:57 +0000 UTC" firstStartedPulling="2025-09-30 12:47:58.548174947 +0000 UTC m=+1652.797857093" lastFinishedPulling="2025-09-30 12:47:59.172400715 +0000 UTC m=+1653.422082861" observedRunningTime="2025-09-30 12:47:59.67810126 +0000 UTC m=+1653.927783406" watchObservedRunningTime="2025-09-30 12:47:59.680438129 +0000 UTC m=+1653.930120295" Sep 30 12:48:11 crc kubenswrapper[5002]: I0930 12:48:11.676302 5002 scope.go:117] "RemoveContainer" containerID="a913a5b698811d1cde360ed1593c0fd1d395638886860c838d99d6b923c5d677" Sep 30 12:48:11 crc kubenswrapper[5002]: E0930 12:48:11.677205 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed 
container=machine-config-daemon pod=machine-config-daemon-ncbb5_openshift-machine-config-operator(341a55c6-78d3-4fa2-8f47-b56fd41fa1c1)\"" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" podUID="341a55c6-78d3-4fa2-8f47-b56fd41fa1c1" Sep 30 12:48:13 crc kubenswrapper[5002]: I0930 12:48:13.049916 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-sync-bnf58"] Sep 30 12:48:13 crc kubenswrapper[5002]: I0930 12:48:13.066318 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-sync-bnf58"] Sep 30 12:48:14 crc kubenswrapper[5002]: I0930 12:48:14.029144 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-sync-vxpxc"] Sep 30 12:48:14 crc kubenswrapper[5002]: I0930 12:48:14.036389 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-sync-vxpxc"] Sep 30 12:48:14 crc kubenswrapper[5002]: I0930 12:48:14.687733 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4fdf0c77-68ae-41ff-b6b5-122baa461b8c" path="/var/lib/kubelet/pods/4fdf0c77-68ae-41ff-b6b5-122baa461b8c/volumes" Sep 30 12:48:14 crc kubenswrapper[5002]: I0930 12:48:14.688432 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6c398da4-8e97-4ee7-83bb-f958c41fabff" path="/var/lib/kubelet/pods/6c398da4-8e97-4ee7-83bb-f958c41fabff/volumes" Sep 30 12:48:16 crc kubenswrapper[5002]: I0930 12:48:16.025277 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-db-create-wldrd"] Sep 30 12:48:16 crc kubenswrapper[5002]: I0930 12:48:16.031893 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-db-create-6khcg"] Sep 30 12:48:16 crc kubenswrapper[5002]: I0930 12:48:16.042870 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-db-create-wldrd"] Sep 30 12:48:16 crc kubenswrapper[5002]: I0930 12:48:16.050242 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-db-create-6khcg"] Sep 30 12:48:16 crc kubenswrapper[5002]: I0930 12:48:16.056494 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-db-create-rb685"] Sep 30 12:48:16 crc kubenswrapper[5002]: I0930 12:48:16.063418 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-db-create-rb685"] Sep 30 12:48:16 crc kubenswrapper[5002]: I0930 12:48:16.689408 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="092e2ea7-c4f1-4ba7-b9e8-064cbbfa53aa" path="/var/lib/kubelet/pods/092e2ea7-c4f1-4ba7-b9e8-064cbbfa53aa/volumes" Sep 30 12:48:16 crc kubenswrapper[5002]: I0930 12:48:16.689982 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="93fde69b-7152-4a47-8d1d-fe6aa7376882" path="/var/lib/kubelet/pods/93fde69b-7152-4a47-8d1d-fe6aa7376882/volumes" Sep 30 12:48:16 crc kubenswrapper[5002]: I0930 12:48:16.690457 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a20a6156-ca1d-4c5c-b98f-b4e062bf4e9c" path="/var/lib/kubelet/pods/a20a6156-ca1d-4c5c-b98f-b4e062bf4e9c/volumes" Sep 30 12:48:26 crc kubenswrapper[5002]: I0930 12:48:26.042720 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-c69f-account-create-hcqbp"] Sep 30 12:48:26 crc kubenswrapper[5002]: I0930 12:48:26.051388 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-c69f-account-create-hcqbp"] Sep 30 12:48:26 crc kubenswrapper[5002]: I0930 12:48:26.686342 5002 scope.go:117] "RemoveContainer" 
containerID="a913a5b698811d1cde360ed1593c0fd1d395638886860c838d99d6b923c5d677" Sep 30 12:48:26 crc kubenswrapper[5002]: E0930 12:48:26.686886 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-ncbb5_openshift-machine-config-operator(341a55c6-78d3-4fa2-8f47-b56fd41fa1c1)\"" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" podUID="341a55c6-78d3-4fa2-8f47-b56fd41fa1c1" Sep 30 12:48:26 crc kubenswrapper[5002]: I0930 12:48:26.689014 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fa09a798-5d72-49bb-adb7-ea553a2f6e74" path="/var/lib/kubelet/pods/fa09a798-5d72-49bb-adb7-ea553a2f6e74/volumes" Sep 30 12:48:27 crc kubenswrapper[5002]: I0930 12:48:27.028449 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-f016-account-create-g57kg"] Sep 30 12:48:27 crc kubenswrapper[5002]: I0930 12:48:27.036398 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-89b0-account-create-qsmbt"] Sep 30 12:48:27 crc kubenswrapper[5002]: I0930 12:48:27.044527 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-f016-account-create-g57kg"] Sep 30 12:48:27 crc kubenswrapper[5002]: I0930 12:48:27.051781 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-89b0-account-create-qsmbt"] Sep 30 12:48:28 crc kubenswrapper[5002]: I0930 12:48:28.691915 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3fcc312f-04fb-44a6-bfba-f083655d42bb" path="/var/lib/kubelet/pods/3fcc312f-04fb-44a6-bfba-f083655d42bb/volumes" Sep 30 12:48:28 crc kubenswrapper[5002]: I0930 12:48:28.693224 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a9278220-9445-4f33-a4e4-a2224206b58e" path="/var/lib/kubelet/pods/a9278220-9445-4f33-a4e4-a2224206b58e/volumes" Sep 30 12:48:35 crc kubenswrapper[5002]: I0930 12:48:35.988808 5002 generic.go:334] "Generic (PLEG): container finished" podID="8ebc7091-7fed-4943-9bff-8d1d9ab3db90" containerID="69e4342b65580958aefe090959eab21ec49395c41b6cca593d14b4dce89f2217" exitCode=0 Sep 30 12:48:35 crc kubenswrapper[5002]: I0930 12:48:35.988872 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-dpzsf" event={"ID":"8ebc7091-7fed-4943-9bff-8d1d9ab3db90","Type":"ContainerDied","Data":"69e4342b65580958aefe090959eab21ec49395c41b6cca593d14b4dce89f2217"} Sep 30 12:48:37 crc kubenswrapper[5002]: I0930 12:48:37.403513 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-dpzsf" Sep 30 12:48:37 crc kubenswrapper[5002]: I0930 12:48:37.452492 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8ebc7091-7fed-4943-9bff-8d1d9ab3db90-ssh-key\") pod \"8ebc7091-7fed-4943-9bff-8d1d9ab3db90\" (UID: \"8ebc7091-7fed-4943-9bff-8d1d9ab3db90\") " Sep 30 12:48:37 crc kubenswrapper[5002]: I0930 12:48:37.452586 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8ebc7091-7fed-4943-9bff-8d1d9ab3db90-inventory\") pod \"8ebc7091-7fed-4943-9bff-8d1d9ab3db90\" (UID: \"8ebc7091-7fed-4943-9bff-8d1d9ab3db90\") " Sep 30 12:48:37 crc kubenswrapper[5002]: I0930 12:48:37.452811 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ktfxw\" (UniqueName: \"kubernetes.io/projected/8ebc7091-7fed-4943-9bff-8d1d9ab3db90-kube-api-access-ktfxw\") pod \"8ebc7091-7fed-4943-9bff-8d1d9ab3db90\" (UID: \"8ebc7091-7fed-4943-9bff-8d1d9ab3db90\") " Sep 30 12:48:37 crc kubenswrapper[5002]: I0930 12:48:37.466053 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8ebc7091-7fed-4943-9bff-8d1d9ab3db90-kube-api-access-ktfxw" (OuterVolumeSpecName: "kube-api-access-ktfxw") pod "8ebc7091-7fed-4943-9bff-8d1d9ab3db90" (UID: "8ebc7091-7fed-4943-9bff-8d1d9ab3db90"). InnerVolumeSpecName "kube-api-access-ktfxw". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 12:48:37 crc kubenswrapper[5002]: I0930 12:48:37.478156 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8ebc7091-7fed-4943-9bff-8d1d9ab3db90-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "8ebc7091-7fed-4943-9bff-8d1d9ab3db90" (UID: "8ebc7091-7fed-4943-9bff-8d1d9ab3db90"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:48:37 crc kubenswrapper[5002]: I0930 12:48:37.488291 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8ebc7091-7fed-4943-9bff-8d1d9ab3db90-inventory" (OuterVolumeSpecName: "inventory") pod "8ebc7091-7fed-4943-9bff-8d1d9ab3db90" (UID: "8ebc7091-7fed-4943-9bff-8d1d9ab3db90"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:48:37 crc kubenswrapper[5002]: I0930 12:48:37.556524 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ktfxw\" (UniqueName: \"kubernetes.io/projected/8ebc7091-7fed-4943-9bff-8d1d9ab3db90-kube-api-access-ktfxw\") on node \"crc\" DevicePath \"\"" Sep 30 12:48:37 crc kubenswrapper[5002]: I0930 12:48:37.557402 5002 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8ebc7091-7fed-4943-9bff-8d1d9ab3db90-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 30 12:48:37 crc kubenswrapper[5002]: I0930 12:48:37.557499 5002 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8ebc7091-7fed-4943-9bff-8d1d9ab3db90-inventory\") on node \"crc\" DevicePath \"\"" Sep 30 12:48:37 crc kubenswrapper[5002]: I0930 12:48:37.676400 5002 scope.go:117] "RemoveContainer" containerID="a913a5b698811d1cde360ed1593c0fd1d395638886860c838d99d6b923c5d677" Sep 30 12:48:37 crc kubenswrapper[5002]: E0930 12:48:37.676881 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-ncbb5_openshift-machine-config-operator(341a55c6-78d3-4fa2-8f47-b56fd41fa1c1)\"" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" podUID="341a55c6-78d3-4fa2-8f47-b56fd41fa1c1" Sep 30 12:48:38 crc kubenswrapper[5002]: I0930 12:48:38.008245 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-dpzsf" event={"ID":"8ebc7091-7fed-4943-9bff-8d1d9ab3db90","Type":"ContainerDied","Data":"9a36da511eac3a0f9caa7962df296f9a727f19ecb68e430912b6bb16cbf93e46"} Sep 30 12:48:38 crc kubenswrapper[5002]: I0930 12:48:38.008297 5002 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9a36da511eac3a0f9caa7962df296f9a727f19ecb68e430912b6bb16cbf93e46" Sep 30 12:48:38 crc kubenswrapper[5002]: I0930 12:48:38.008331 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-dpzsf" Sep 30 12:48:38 crc kubenswrapper[5002]: I0930 12:48:38.099869 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-gwfhm"] Sep 30 12:48:38 crc kubenswrapper[5002]: E0930 12:48:38.100229 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8ebc7091-7fed-4943-9bff-8d1d9ab3db90" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Sep 30 12:48:38 crc kubenswrapper[5002]: I0930 12:48:38.100245 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="8ebc7091-7fed-4943-9bff-8d1d9ab3db90" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Sep 30 12:48:38 crc kubenswrapper[5002]: I0930 12:48:38.100452 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="8ebc7091-7fed-4943-9bff-8d1d9ab3db90" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Sep 30 12:48:38 crc kubenswrapper[5002]: I0930 12:48:38.101049 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-gwfhm" Sep 30 12:48:38 crc kubenswrapper[5002]: I0930 12:48:38.106246 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 30 12:48:38 crc kubenswrapper[5002]: I0930 12:48:38.106350 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Sep 30 12:48:38 crc kubenswrapper[5002]: I0930 12:48:38.106874 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-ddhrk" Sep 30 12:48:38 crc kubenswrapper[5002]: I0930 12:48:38.107158 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Sep 30 12:48:38 crc kubenswrapper[5002]: I0930 12:48:38.115977 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-gwfhm"] Sep 30 12:48:38 crc kubenswrapper[5002]: I0930 12:48:38.170191 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-msfk9\" (UniqueName: \"kubernetes.io/projected/36ca4049-6444-4a67-b607-b15095a3dabf-kube-api-access-msfk9\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-gwfhm\" (UID: \"36ca4049-6444-4a67-b607-b15095a3dabf\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-gwfhm" Sep 30 12:48:38 crc kubenswrapper[5002]: I0930 12:48:38.170453 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/36ca4049-6444-4a67-b607-b15095a3dabf-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-gwfhm\" (UID: \"36ca4049-6444-4a67-b607-b15095a3dabf\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-gwfhm" Sep 30 12:48:38 crc kubenswrapper[5002]: I0930 12:48:38.170668 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/36ca4049-6444-4a67-b607-b15095a3dabf-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-gwfhm\" (UID: \"36ca4049-6444-4a67-b607-b15095a3dabf\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-gwfhm" Sep 30 12:48:38 crc kubenswrapper[5002]: I0930 12:48:38.272117 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/36ca4049-6444-4a67-b607-b15095a3dabf-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-gwfhm\" (UID: \"36ca4049-6444-4a67-b607-b15095a3dabf\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-gwfhm" Sep 30 12:48:38 crc kubenswrapper[5002]: I0930 12:48:38.272214 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/36ca4049-6444-4a67-b607-b15095a3dabf-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-gwfhm\" (UID: \"36ca4049-6444-4a67-b607-b15095a3dabf\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-gwfhm" Sep 30 12:48:38 crc kubenswrapper[5002]: I0930 12:48:38.272286 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-msfk9\" (UniqueName: \"kubernetes.io/projected/36ca4049-6444-4a67-b607-b15095a3dabf-kube-api-access-msfk9\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-gwfhm\" 
(UID: \"36ca4049-6444-4a67-b607-b15095a3dabf\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-gwfhm" Sep 30 12:48:38 crc kubenswrapper[5002]: I0930 12:48:38.275834 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/36ca4049-6444-4a67-b607-b15095a3dabf-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-gwfhm\" (UID: \"36ca4049-6444-4a67-b607-b15095a3dabf\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-gwfhm" Sep 30 12:48:38 crc kubenswrapper[5002]: I0930 12:48:38.277641 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/36ca4049-6444-4a67-b607-b15095a3dabf-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-gwfhm\" (UID: \"36ca4049-6444-4a67-b607-b15095a3dabf\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-gwfhm" Sep 30 12:48:38 crc kubenswrapper[5002]: I0930 12:48:38.291965 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-msfk9\" (UniqueName: \"kubernetes.io/projected/36ca4049-6444-4a67-b607-b15095a3dabf-kube-api-access-msfk9\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-gwfhm\" (UID: \"36ca4049-6444-4a67-b607-b15095a3dabf\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-gwfhm" Sep 30 12:48:38 crc kubenswrapper[5002]: I0930 12:48:38.432880 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-gwfhm" Sep 30 12:48:38 crc kubenswrapper[5002]: I0930 12:48:38.974113 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-gwfhm"] Sep 30 12:48:39 crc kubenswrapper[5002]: I0930 12:48:39.016926 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-gwfhm" event={"ID":"36ca4049-6444-4a67-b607-b15095a3dabf","Type":"ContainerStarted","Data":"53d2627cf85394f693f3d6477859f6e90460d24e012c5dcd11bd25f6ad7257a3"} Sep 30 12:48:41 crc kubenswrapper[5002]: I0930 12:48:41.035558 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-gwfhm" event={"ID":"36ca4049-6444-4a67-b607-b15095a3dabf","Type":"ContainerStarted","Data":"e9e42acd59ea6254ed3013b74b08bfb410c3029a9ad1a912b4ccd559db2114b6"} Sep 30 12:48:41 crc kubenswrapper[5002]: I0930 12:48:41.059792 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-gwfhm" podStartSLOduration=2.017022894 podStartE2EDuration="3.059768296s" podCreationTimestamp="2025-09-30 12:48:38 +0000 UTC" firstStartedPulling="2025-09-30 12:48:38.984093092 +0000 UTC m=+1693.233775238" lastFinishedPulling="2025-09-30 12:48:40.026838494 +0000 UTC m=+1694.276520640" observedRunningTime="2025-09-30 12:48:41.051016142 +0000 UTC m=+1695.300698318" watchObservedRunningTime="2025-09-30 12:48:41.059768296 +0000 UTC m=+1695.309450442" Sep 30 12:48:41 crc kubenswrapper[5002]: I0930 12:48:41.989287 5002 scope.go:117] "RemoveContainer" containerID="99e4b3a17cc927ad3e605fd9a30bdbb8f060cee29ef555dad7ccd90f2d022022" Sep 30 12:48:42 crc kubenswrapper[5002]: I0930 12:48:42.013123 5002 scope.go:117] "RemoveContainer" containerID="12cdb900bf0fb859526f074db970c8589cc60cb37f68824e568f580a6fdf5c63" Sep 30 12:48:42 crc kubenswrapper[5002]: I0930 
12:48:42.077199 5002 scope.go:117] "RemoveContainer" containerID="d609d8d01b5ef3799f852ae0abba88b8382e4a4f949ffd3f883324d4be043ac7" Sep 30 12:48:42 crc kubenswrapper[5002]: I0930 12:48:42.118578 5002 scope.go:117] "RemoveContainer" containerID="239dcf3d4f1d76af4b15a736976090ea497fc3006086fecbc2b06e233d1d7553" Sep 30 12:48:42 crc kubenswrapper[5002]: I0930 12:48:42.178681 5002 scope.go:117] "RemoveContainer" containerID="a99c08c483c48a24fd03aaf010f4c106d01c65729b9e0bebd44143567528ada9" Sep 30 12:48:42 crc kubenswrapper[5002]: I0930 12:48:42.228761 5002 scope.go:117] "RemoveContainer" containerID="8a95094e63785df2c49d0117ed1d9bc8bff30ac583aa71825afe7b2e3d5ed8b6" Sep 30 12:48:42 crc kubenswrapper[5002]: I0930 12:48:42.277154 5002 scope.go:117] "RemoveContainer" containerID="e568b901f2f004554c2b51e6ca6d7bfb87cd86d554606ee1c5d0b24090253d1d" Sep 30 12:48:42 crc kubenswrapper[5002]: I0930 12:48:42.303056 5002 scope.go:117] "RemoveContainer" containerID="cb0b5286d1ef4e1be6bf180bf3c6c0e16cae232fa043f16393e9eba6d67b31a7" Sep 30 12:48:42 crc kubenswrapper[5002]: I0930 12:48:42.325244 5002 scope.go:117] "RemoveContainer" containerID="91bca0cfabf89f30330744570d3cbc0de9ddb00b0af437af6658490f5a99fdff" Sep 30 12:48:42 crc kubenswrapper[5002]: I0930 12:48:42.347469 5002 scope.go:117] "RemoveContainer" containerID="cceaa8ffba75c4a886212eaae9b7cec4b36f414dcd3dec93a47c8ca5278f9e2c" Sep 30 12:48:50 crc kubenswrapper[5002]: I0930 12:48:50.677406 5002 scope.go:117] "RemoveContainer" containerID="a913a5b698811d1cde360ed1593c0fd1d395638886860c838d99d6b923c5d677" Sep 30 12:48:50 crc kubenswrapper[5002]: E0930 12:48:50.678213 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-ncbb5_openshift-machine-config-operator(341a55c6-78d3-4fa2-8f47-b56fd41fa1c1)\"" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" podUID="341a55c6-78d3-4fa2-8f47-b56fd41fa1c1" Sep 30 12:48:54 crc kubenswrapper[5002]: I0930 12:48:54.036011 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-gjsbc"] Sep 30 12:48:54 crc kubenswrapper[5002]: I0930 12:48:54.042716 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-gjsbc"] Sep 30 12:48:54 crc kubenswrapper[5002]: I0930 12:48:54.687594 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7c7c28f5-a6b2-4841-ac23-37c9167b3da4" path="/var/lib/kubelet/pods/7c7c28f5-a6b2-4841-ac23-37c9167b3da4/volumes" Sep 30 12:49:05 crc kubenswrapper[5002]: I0930 12:49:05.676810 5002 scope.go:117] "RemoveContainer" containerID="a913a5b698811d1cde360ed1593c0fd1d395638886860c838d99d6b923c5d677" Sep 30 12:49:05 crc kubenswrapper[5002]: E0930 12:49:05.677768 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-ncbb5_openshift-machine-config-operator(341a55c6-78d3-4fa2-8f47-b56fd41fa1c1)\"" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" podUID="341a55c6-78d3-4fa2-8f47-b56fd41fa1c1" Sep 30 12:49:16 crc kubenswrapper[5002]: I0930 12:49:16.692363 5002 scope.go:117] "RemoveContainer" containerID="a913a5b698811d1cde360ed1593c0fd1d395638886860c838d99d6b923c5d677" Sep 30 12:49:16 crc kubenswrapper[5002]: E0930 
12:49:16.705802 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-ncbb5_openshift-machine-config-operator(341a55c6-78d3-4fa2-8f47-b56fd41fa1c1)\"" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" podUID="341a55c6-78d3-4fa2-8f47-b56fd41fa1c1" Sep 30 12:49:23 crc kubenswrapper[5002]: I0930 12:49:23.039881 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-cell-mapping-8kcwc"] Sep 30 12:49:23 crc kubenswrapper[5002]: I0930 12:49:23.049867 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-cell-mapping-8kcwc"] Sep 30 12:49:24 crc kubenswrapper[5002]: I0930 12:49:24.688604 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cd4d414b-cd09-4feb-913f-174f17996cd1" path="/var/lib/kubelet/pods/cd4d414b-cd09-4feb-913f-174f17996cd1/volumes" Sep 30 12:49:25 crc kubenswrapper[5002]: I0930 12:49:25.033866 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-6qsgx"] Sep 30 12:49:25 crc kubenswrapper[5002]: I0930 12:49:25.042037 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-6qsgx"] Sep 30 12:49:26 crc kubenswrapper[5002]: I0930 12:49:26.689139 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c4e8b9fc-5aff-4584-974f-ad060b75f0c3" path="/var/lib/kubelet/pods/c4e8b9fc-5aff-4584-974f-ad060b75f0c3/volumes" Sep 30 12:49:27 crc kubenswrapper[5002]: I0930 12:49:27.675948 5002 scope.go:117] "RemoveContainer" containerID="a913a5b698811d1cde360ed1593c0fd1d395638886860c838d99d6b923c5d677" Sep 30 12:49:27 crc kubenswrapper[5002]: E0930 12:49:27.676562 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-ncbb5_openshift-machine-config-operator(341a55c6-78d3-4fa2-8f47-b56fd41fa1c1)\"" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" podUID="341a55c6-78d3-4fa2-8f47-b56fd41fa1c1" Sep 30 12:49:34 crc kubenswrapper[5002]: I0930 12:49:34.528530 5002 generic.go:334] "Generic (PLEG): container finished" podID="36ca4049-6444-4a67-b607-b15095a3dabf" containerID="e9e42acd59ea6254ed3013b74b08bfb410c3029a9ad1a912b4ccd559db2114b6" exitCode=2 Sep 30 12:49:34 crc kubenswrapper[5002]: I0930 12:49:34.528605 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-gwfhm" event={"ID":"36ca4049-6444-4a67-b607-b15095a3dabf","Type":"ContainerDied","Data":"e9e42acd59ea6254ed3013b74b08bfb410c3029a9ad1a912b4ccd559db2114b6"} Sep 30 12:49:35 crc kubenswrapper[5002]: I0930 12:49:35.929068 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-gwfhm" Sep 30 12:49:36 crc kubenswrapper[5002]: I0930 12:49:36.040747 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-msfk9\" (UniqueName: \"kubernetes.io/projected/36ca4049-6444-4a67-b607-b15095a3dabf-kube-api-access-msfk9\") pod \"36ca4049-6444-4a67-b607-b15095a3dabf\" (UID: \"36ca4049-6444-4a67-b607-b15095a3dabf\") " Sep 30 12:49:36 crc kubenswrapper[5002]: I0930 12:49:36.040919 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/36ca4049-6444-4a67-b607-b15095a3dabf-ssh-key\") pod \"36ca4049-6444-4a67-b607-b15095a3dabf\" (UID: \"36ca4049-6444-4a67-b607-b15095a3dabf\") " Sep 30 12:49:36 crc kubenswrapper[5002]: I0930 12:49:36.041047 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/36ca4049-6444-4a67-b607-b15095a3dabf-inventory\") pod \"36ca4049-6444-4a67-b607-b15095a3dabf\" (UID: \"36ca4049-6444-4a67-b607-b15095a3dabf\") " Sep 30 12:49:36 crc kubenswrapper[5002]: I0930 12:49:36.046943 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/36ca4049-6444-4a67-b607-b15095a3dabf-kube-api-access-msfk9" (OuterVolumeSpecName: "kube-api-access-msfk9") pod "36ca4049-6444-4a67-b607-b15095a3dabf" (UID: "36ca4049-6444-4a67-b607-b15095a3dabf"). InnerVolumeSpecName "kube-api-access-msfk9". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 12:49:36 crc kubenswrapper[5002]: I0930 12:49:36.068081 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/36ca4049-6444-4a67-b607-b15095a3dabf-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "36ca4049-6444-4a67-b607-b15095a3dabf" (UID: "36ca4049-6444-4a67-b607-b15095a3dabf"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:49:36 crc kubenswrapper[5002]: I0930 12:49:36.069699 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/36ca4049-6444-4a67-b607-b15095a3dabf-inventory" (OuterVolumeSpecName: "inventory") pod "36ca4049-6444-4a67-b607-b15095a3dabf" (UID: "36ca4049-6444-4a67-b607-b15095a3dabf"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:49:36 crc kubenswrapper[5002]: I0930 12:49:36.143301 5002 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/36ca4049-6444-4a67-b607-b15095a3dabf-inventory\") on node \"crc\" DevicePath \"\"" Sep 30 12:49:36 crc kubenswrapper[5002]: I0930 12:49:36.143344 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-msfk9\" (UniqueName: \"kubernetes.io/projected/36ca4049-6444-4a67-b607-b15095a3dabf-kube-api-access-msfk9\") on node \"crc\" DevicePath \"\"" Sep 30 12:49:36 crc kubenswrapper[5002]: I0930 12:49:36.143359 5002 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/36ca4049-6444-4a67-b607-b15095a3dabf-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 30 12:49:36 crc kubenswrapper[5002]: I0930 12:49:36.548819 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-gwfhm" event={"ID":"36ca4049-6444-4a67-b607-b15095a3dabf","Type":"ContainerDied","Data":"53d2627cf85394f693f3d6477859f6e90460d24e012c5dcd11bd25f6ad7257a3"} Sep 30 12:49:36 crc kubenswrapper[5002]: I0930 12:49:36.548867 5002 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="53d2627cf85394f693f3d6477859f6e90460d24e012c5dcd11bd25f6ad7257a3" Sep 30 12:49:36 crc kubenswrapper[5002]: I0930 12:49:36.548899 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-gwfhm" Sep 30 12:49:41 crc kubenswrapper[5002]: I0930 12:49:41.676754 5002 scope.go:117] "RemoveContainer" containerID="a913a5b698811d1cde360ed1593c0fd1d395638886860c838d99d6b923c5d677" Sep 30 12:49:41 crc kubenswrapper[5002]: E0930 12:49:41.677905 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-ncbb5_openshift-machine-config-operator(341a55c6-78d3-4fa2-8f47-b56fd41fa1c1)\"" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" podUID="341a55c6-78d3-4fa2-8f47-b56fd41fa1c1" Sep 30 12:49:42 crc kubenswrapper[5002]: I0930 12:49:42.571907 5002 scope.go:117] "RemoveContainer" containerID="0b2e9bbf92e51a3d7e2593be1b5ed29afadceb9b66b7a114d28b4ba438b71372" Sep 30 12:49:42 crc kubenswrapper[5002]: I0930 12:49:42.621900 5002 scope.go:117] "RemoveContainer" containerID="1fb42e09be404019f6f83adc5f150f7da4c40df99782a5d5437b642079a7a848" Sep 30 12:49:42 crc kubenswrapper[5002]: I0930 12:49:42.683944 5002 scope.go:117] "RemoveContainer" containerID="63f825fabef76f0e73f621d2a6fddaa3a422602c6bde058a11e8940acb387cd5" Sep 30 12:49:43 crc kubenswrapper[5002]: I0930 12:49:43.025111 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-tp5lv"] Sep 30 12:49:43 crc kubenswrapper[5002]: E0930 12:49:43.025515 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="36ca4049-6444-4a67-b607-b15095a3dabf" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Sep 30 12:49:43 crc kubenswrapper[5002]: I0930 12:49:43.025534 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="36ca4049-6444-4a67-b607-b15095a3dabf" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Sep 30 12:49:43 crc kubenswrapper[5002]: I0930 12:49:43.025717 5002 
memory_manager.go:354] "RemoveStaleState removing state" podUID="36ca4049-6444-4a67-b607-b15095a3dabf" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Sep 30 12:49:43 crc kubenswrapper[5002]: I0930 12:49:43.026311 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-tp5lv" Sep 30 12:49:43 crc kubenswrapper[5002]: I0930 12:49:43.028214 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Sep 30 12:49:43 crc kubenswrapper[5002]: I0930 12:49:43.028603 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Sep 30 12:49:43 crc kubenswrapper[5002]: I0930 12:49:43.028916 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-ddhrk" Sep 30 12:49:43 crc kubenswrapper[5002]: I0930 12:49:43.033459 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 30 12:49:43 crc kubenswrapper[5002]: I0930 12:49:43.043277 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-tp5lv"] Sep 30 12:49:43 crc kubenswrapper[5002]: I0930 12:49:43.178269 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fmvkz\" (UniqueName: \"kubernetes.io/projected/f4f8c21f-af18-49de-8a07-140d16e9785f-kube-api-access-fmvkz\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-tp5lv\" (UID: \"f4f8c21f-af18-49de-8a07-140d16e9785f\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-tp5lv" Sep 30 12:49:43 crc kubenswrapper[5002]: I0930 12:49:43.178344 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f4f8c21f-af18-49de-8a07-140d16e9785f-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-tp5lv\" (UID: \"f4f8c21f-af18-49de-8a07-140d16e9785f\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-tp5lv" Sep 30 12:49:43 crc kubenswrapper[5002]: I0930 12:49:43.178497 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f4f8c21f-af18-49de-8a07-140d16e9785f-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-tp5lv\" (UID: \"f4f8c21f-af18-49de-8a07-140d16e9785f\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-tp5lv" Sep 30 12:49:43 crc kubenswrapper[5002]: I0930 12:49:43.280045 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fmvkz\" (UniqueName: \"kubernetes.io/projected/f4f8c21f-af18-49de-8a07-140d16e9785f-kube-api-access-fmvkz\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-tp5lv\" (UID: \"f4f8c21f-af18-49de-8a07-140d16e9785f\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-tp5lv" Sep 30 12:49:43 crc kubenswrapper[5002]: I0930 12:49:43.280135 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f4f8c21f-af18-49de-8a07-140d16e9785f-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-tp5lv\" (UID: \"f4f8c21f-af18-49de-8a07-140d16e9785f\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-tp5lv" Sep 30 12:49:43 crc 
kubenswrapper[5002]: I0930 12:49:43.280190 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f4f8c21f-af18-49de-8a07-140d16e9785f-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-tp5lv\" (UID: \"f4f8c21f-af18-49de-8a07-140d16e9785f\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-tp5lv" Sep 30 12:49:43 crc kubenswrapper[5002]: I0930 12:49:43.286530 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f4f8c21f-af18-49de-8a07-140d16e9785f-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-tp5lv\" (UID: \"f4f8c21f-af18-49de-8a07-140d16e9785f\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-tp5lv" Sep 30 12:49:43 crc kubenswrapper[5002]: I0930 12:49:43.286568 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f4f8c21f-af18-49de-8a07-140d16e9785f-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-tp5lv\" (UID: \"f4f8c21f-af18-49de-8a07-140d16e9785f\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-tp5lv" Sep 30 12:49:43 crc kubenswrapper[5002]: I0930 12:49:43.297503 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fmvkz\" (UniqueName: \"kubernetes.io/projected/f4f8c21f-af18-49de-8a07-140d16e9785f-kube-api-access-fmvkz\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-tp5lv\" (UID: \"f4f8c21f-af18-49de-8a07-140d16e9785f\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-tp5lv" Sep 30 12:49:43 crc kubenswrapper[5002]: I0930 12:49:43.345332 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-tp5lv" Sep 30 12:49:44 crc kubenswrapper[5002]: I0930 12:49:43.857659 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-tp5lv"] Sep 30 12:49:44 crc kubenswrapper[5002]: I0930 12:49:44.623947 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-tp5lv" event={"ID":"f4f8c21f-af18-49de-8a07-140d16e9785f","Type":"ContainerStarted","Data":"3b1ddb651bdbb94457b31e7855c8ff4d363dd56e3cc9fa50e8338cac1a35ed7e"} Sep 30 12:49:44 crc kubenswrapper[5002]: I0930 12:49:44.624357 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-tp5lv" event={"ID":"f4f8c21f-af18-49de-8a07-140d16e9785f","Type":"ContainerStarted","Data":"a4d9ee88f97d5c43f0c0e2dd112b88691107e74f625a9bef727f70b78179853f"} Sep 30 12:49:44 crc kubenswrapper[5002]: I0930 12:49:44.640740 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-tp5lv" podStartSLOduration=1.194304868 podStartE2EDuration="1.640713866s" podCreationTimestamp="2025-09-30 12:49:43 +0000 UTC" firstStartedPulling="2025-09-30 12:49:43.860999372 +0000 UTC m=+1758.110681508" lastFinishedPulling="2025-09-30 12:49:44.30740836 +0000 UTC m=+1758.557090506" observedRunningTime="2025-09-30 12:49:44.637819962 +0000 UTC m=+1758.887511638" watchObservedRunningTime="2025-09-30 12:49:44.640713866 +0000 UTC m=+1758.890396012" Sep 30 12:49:55 crc kubenswrapper[5002]: I0930 12:49:55.676356 5002 scope.go:117] "RemoveContainer" containerID="a913a5b698811d1cde360ed1593c0fd1d395638886860c838d99d6b923c5d677" Sep 30 12:49:55 crc kubenswrapper[5002]: E0930 12:49:55.677203 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-ncbb5_openshift-machine-config-operator(341a55c6-78d3-4fa2-8f47-b56fd41fa1c1)\"" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" podUID="341a55c6-78d3-4fa2-8f47-b56fd41fa1c1" Sep 30 12:50:06 crc kubenswrapper[5002]: I0930 12:50:06.052915 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-cell-mapping-5rtsz"] Sep 30 12:50:06 crc kubenswrapper[5002]: I0930 12:50:06.064059 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-cell-mapping-5rtsz"] Sep 30 12:50:06 crc kubenswrapper[5002]: I0930 12:50:06.687920 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="df974acb-ce3b-47d1-a5f5-a6f5ad8c95c6" path="/var/lib/kubelet/pods/df974acb-ce3b-47d1-a5f5-a6f5ad8c95c6/volumes" Sep 30 12:50:08 crc kubenswrapper[5002]: I0930 12:50:08.676617 5002 scope.go:117] "RemoveContainer" containerID="a913a5b698811d1cde360ed1593c0fd1d395638886860c838d99d6b923c5d677" Sep 30 12:50:09 crc kubenswrapper[5002]: I0930 12:50:09.850519 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" event={"ID":"341a55c6-78d3-4fa2-8f47-b56fd41fa1c1","Type":"ContainerStarted","Data":"83f28e70b07c52d080e9624494f8eff05394195f3007fb1fb9e6b51a22eef012"} Sep 30 12:50:28 crc kubenswrapper[5002]: I0930 12:50:28.012205 5002 generic.go:334] "Generic (PLEG): container finished" podID="f4f8c21f-af18-49de-8a07-140d16e9785f" 
containerID="3b1ddb651bdbb94457b31e7855c8ff4d363dd56e3cc9fa50e8338cac1a35ed7e" exitCode=0 Sep 30 12:50:28 crc kubenswrapper[5002]: I0930 12:50:28.012285 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-tp5lv" event={"ID":"f4f8c21f-af18-49de-8a07-140d16e9785f","Type":"ContainerDied","Data":"3b1ddb651bdbb94457b31e7855c8ff4d363dd56e3cc9fa50e8338cac1a35ed7e"} Sep 30 12:50:29 crc kubenswrapper[5002]: I0930 12:50:29.384495 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-tp5lv" Sep 30 12:50:29 crc kubenswrapper[5002]: I0930 12:50:29.427462 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f4f8c21f-af18-49de-8a07-140d16e9785f-inventory\") pod \"f4f8c21f-af18-49de-8a07-140d16e9785f\" (UID: \"f4f8c21f-af18-49de-8a07-140d16e9785f\") " Sep 30 12:50:29 crc kubenswrapper[5002]: I0930 12:50:29.427612 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f4f8c21f-af18-49de-8a07-140d16e9785f-ssh-key\") pod \"f4f8c21f-af18-49de-8a07-140d16e9785f\" (UID: \"f4f8c21f-af18-49de-8a07-140d16e9785f\") " Sep 30 12:50:29 crc kubenswrapper[5002]: I0930 12:50:29.427752 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fmvkz\" (UniqueName: \"kubernetes.io/projected/f4f8c21f-af18-49de-8a07-140d16e9785f-kube-api-access-fmvkz\") pod \"f4f8c21f-af18-49de-8a07-140d16e9785f\" (UID: \"f4f8c21f-af18-49de-8a07-140d16e9785f\") " Sep 30 12:50:29 crc kubenswrapper[5002]: I0930 12:50:29.432462 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f4f8c21f-af18-49de-8a07-140d16e9785f-kube-api-access-fmvkz" (OuterVolumeSpecName: "kube-api-access-fmvkz") pod "f4f8c21f-af18-49de-8a07-140d16e9785f" (UID: "f4f8c21f-af18-49de-8a07-140d16e9785f"). InnerVolumeSpecName "kube-api-access-fmvkz". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 12:50:29 crc kubenswrapper[5002]: I0930 12:50:29.452404 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f4f8c21f-af18-49de-8a07-140d16e9785f-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "f4f8c21f-af18-49de-8a07-140d16e9785f" (UID: "f4f8c21f-af18-49de-8a07-140d16e9785f"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:50:29 crc kubenswrapper[5002]: I0930 12:50:29.453673 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f4f8c21f-af18-49de-8a07-140d16e9785f-inventory" (OuterVolumeSpecName: "inventory") pod "f4f8c21f-af18-49de-8a07-140d16e9785f" (UID: "f4f8c21f-af18-49de-8a07-140d16e9785f"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:50:29 crc kubenswrapper[5002]: I0930 12:50:29.529994 5002 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f4f8c21f-af18-49de-8a07-140d16e9785f-inventory\") on node \"crc\" DevicePath \"\"" Sep 30 12:50:29 crc kubenswrapper[5002]: I0930 12:50:29.530034 5002 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f4f8c21f-af18-49de-8a07-140d16e9785f-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 30 12:50:29 crc kubenswrapper[5002]: I0930 12:50:29.530045 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fmvkz\" (UniqueName: \"kubernetes.io/projected/f4f8c21f-af18-49de-8a07-140d16e9785f-kube-api-access-fmvkz\") on node \"crc\" DevicePath \"\"" Sep 30 12:50:30 crc kubenswrapper[5002]: I0930 12:50:30.030161 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-tp5lv" event={"ID":"f4f8c21f-af18-49de-8a07-140d16e9785f","Type":"ContainerDied","Data":"a4d9ee88f97d5c43f0c0e2dd112b88691107e74f625a9bef727f70b78179853f"} Sep 30 12:50:30 crc kubenswrapper[5002]: I0930 12:50:30.030211 5002 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a4d9ee88f97d5c43f0c0e2dd112b88691107e74f625a9bef727f70b78179853f" Sep 30 12:50:30 crc kubenswrapper[5002]: I0930 12:50:30.030218 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-tp5lv" Sep 30 12:50:30 crc kubenswrapper[5002]: I0930 12:50:30.112370 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-w2tmz"] Sep 30 12:50:30 crc kubenswrapper[5002]: E0930 12:50:30.112912 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4f8c21f-af18-49de-8a07-140d16e9785f" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Sep 30 12:50:30 crc kubenswrapper[5002]: I0930 12:50:30.112935 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4f8c21f-af18-49de-8a07-140d16e9785f" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Sep 30 12:50:30 crc kubenswrapper[5002]: I0930 12:50:30.113183 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4f8c21f-af18-49de-8a07-140d16e9785f" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Sep 30 12:50:30 crc kubenswrapper[5002]: I0930 12:50:30.114133 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-w2tmz" Sep 30 12:50:30 crc kubenswrapper[5002]: I0930 12:50:30.115922 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-ddhrk" Sep 30 12:50:30 crc kubenswrapper[5002]: I0930 12:50:30.116092 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Sep 30 12:50:30 crc kubenswrapper[5002]: I0930 12:50:30.116273 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 30 12:50:30 crc kubenswrapper[5002]: I0930 12:50:30.117337 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Sep 30 12:50:30 crc kubenswrapper[5002]: I0930 12:50:30.121918 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-w2tmz"] Sep 30 12:50:30 crc kubenswrapper[5002]: I0930 12:50:30.143340 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/13e7455b-ca1c-475f-95af-c9813c0876f7-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-w2tmz\" (UID: \"13e7455b-ca1c-475f-95af-c9813c0876f7\") " pod="openstack/ssh-known-hosts-edpm-deployment-w2tmz" Sep 30 12:50:30 crc kubenswrapper[5002]: I0930 12:50:30.143455 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/13e7455b-ca1c-475f-95af-c9813c0876f7-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-w2tmz\" (UID: \"13e7455b-ca1c-475f-95af-c9813c0876f7\") " pod="openstack/ssh-known-hosts-edpm-deployment-w2tmz" Sep 30 12:50:30 crc kubenswrapper[5002]: I0930 12:50:30.143545 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dl4nz\" (UniqueName: \"kubernetes.io/projected/13e7455b-ca1c-475f-95af-c9813c0876f7-kube-api-access-dl4nz\") pod \"ssh-known-hosts-edpm-deployment-w2tmz\" (UID: \"13e7455b-ca1c-475f-95af-c9813c0876f7\") " pod="openstack/ssh-known-hosts-edpm-deployment-w2tmz" Sep 30 12:50:30 crc kubenswrapper[5002]: I0930 12:50:30.245587 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dl4nz\" (UniqueName: \"kubernetes.io/projected/13e7455b-ca1c-475f-95af-c9813c0876f7-kube-api-access-dl4nz\") pod \"ssh-known-hosts-edpm-deployment-w2tmz\" (UID: \"13e7455b-ca1c-475f-95af-c9813c0876f7\") " pod="openstack/ssh-known-hosts-edpm-deployment-w2tmz" Sep 30 12:50:30 crc kubenswrapper[5002]: I0930 12:50:30.245868 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/13e7455b-ca1c-475f-95af-c9813c0876f7-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-w2tmz\" (UID: \"13e7455b-ca1c-475f-95af-c9813c0876f7\") " pod="openstack/ssh-known-hosts-edpm-deployment-w2tmz" Sep 30 12:50:30 crc kubenswrapper[5002]: I0930 12:50:30.246016 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/13e7455b-ca1c-475f-95af-c9813c0876f7-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-w2tmz\" (UID: \"13e7455b-ca1c-475f-95af-c9813c0876f7\") " pod="openstack/ssh-known-hosts-edpm-deployment-w2tmz" Sep 30 12:50:30 crc 
kubenswrapper[5002]: I0930 12:50:30.250607 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/13e7455b-ca1c-475f-95af-c9813c0876f7-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-w2tmz\" (UID: \"13e7455b-ca1c-475f-95af-c9813c0876f7\") " pod="openstack/ssh-known-hosts-edpm-deployment-w2tmz" Sep 30 12:50:30 crc kubenswrapper[5002]: I0930 12:50:30.250905 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/13e7455b-ca1c-475f-95af-c9813c0876f7-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-w2tmz\" (UID: \"13e7455b-ca1c-475f-95af-c9813c0876f7\") " pod="openstack/ssh-known-hosts-edpm-deployment-w2tmz" Sep 30 12:50:30 crc kubenswrapper[5002]: I0930 12:50:30.263500 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dl4nz\" (UniqueName: \"kubernetes.io/projected/13e7455b-ca1c-475f-95af-c9813c0876f7-kube-api-access-dl4nz\") pod \"ssh-known-hosts-edpm-deployment-w2tmz\" (UID: \"13e7455b-ca1c-475f-95af-c9813c0876f7\") " pod="openstack/ssh-known-hosts-edpm-deployment-w2tmz" Sep 30 12:50:30 crc kubenswrapper[5002]: I0930 12:50:30.437807 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-w2tmz" Sep 30 12:50:30 crc kubenswrapper[5002]: I0930 12:50:30.939372 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-w2tmz"] Sep 30 12:50:31 crc kubenswrapper[5002]: I0930 12:50:31.038852 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-w2tmz" event={"ID":"13e7455b-ca1c-475f-95af-c9813c0876f7","Type":"ContainerStarted","Data":"e8019c4cb3f75fd1f0adc9435b76c4a9ea238e508620e2d6e64200dd3e1aa579"} Sep 30 12:50:32 crc kubenswrapper[5002]: I0930 12:50:32.049613 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-w2tmz" event={"ID":"13e7455b-ca1c-475f-95af-c9813c0876f7","Type":"ContainerStarted","Data":"90f8ab092175f21ae014b6aaadb8d89164b658341ba4c6ad6a1c7a5dd3f96ff3"} Sep 30 12:50:32 crc kubenswrapper[5002]: I0930 12:50:32.067668 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ssh-known-hosts-edpm-deployment-w2tmz" podStartSLOduration=1.612881963 podStartE2EDuration="2.067652716s" podCreationTimestamp="2025-09-30 12:50:30 +0000 UTC" firstStartedPulling="2025-09-30 12:50:30.959976542 +0000 UTC m=+1805.209658688" lastFinishedPulling="2025-09-30 12:50:31.414747295 +0000 UTC m=+1805.664429441" observedRunningTime="2025-09-30 12:50:32.063566421 +0000 UTC m=+1806.313248587" watchObservedRunningTime="2025-09-30 12:50:32.067652716 +0000 UTC m=+1806.317334862" Sep 30 12:50:39 crc kubenswrapper[5002]: I0930 12:50:39.109753 5002 generic.go:334] "Generic (PLEG): container finished" podID="13e7455b-ca1c-475f-95af-c9813c0876f7" containerID="90f8ab092175f21ae014b6aaadb8d89164b658341ba4c6ad6a1c7a5dd3f96ff3" exitCode=0 Sep 30 12:50:39 crc kubenswrapper[5002]: I0930 12:50:39.109861 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-w2tmz" event={"ID":"13e7455b-ca1c-475f-95af-c9813c0876f7","Type":"ContainerDied","Data":"90f8ab092175f21ae014b6aaadb8d89164b658341ba4c6ad6a1c7a5dd3f96ff3"} Sep 30 12:50:40 crc kubenswrapper[5002]: I0930 12:50:40.493912 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-w2tmz" Sep 30 12:50:40 crc kubenswrapper[5002]: I0930 12:50:40.539154 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/13e7455b-ca1c-475f-95af-c9813c0876f7-inventory-0\") pod \"13e7455b-ca1c-475f-95af-c9813c0876f7\" (UID: \"13e7455b-ca1c-475f-95af-c9813c0876f7\") " Sep 30 12:50:40 crc kubenswrapper[5002]: I0930 12:50:40.539393 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dl4nz\" (UniqueName: \"kubernetes.io/projected/13e7455b-ca1c-475f-95af-c9813c0876f7-kube-api-access-dl4nz\") pod \"13e7455b-ca1c-475f-95af-c9813c0876f7\" (UID: \"13e7455b-ca1c-475f-95af-c9813c0876f7\") " Sep 30 12:50:40 crc kubenswrapper[5002]: I0930 12:50:40.539534 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/13e7455b-ca1c-475f-95af-c9813c0876f7-ssh-key-openstack-edpm-ipam\") pod \"13e7455b-ca1c-475f-95af-c9813c0876f7\" (UID: \"13e7455b-ca1c-475f-95af-c9813c0876f7\") " Sep 30 12:50:40 crc kubenswrapper[5002]: I0930 12:50:40.545285 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/13e7455b-ca1c-475f-95af-c9813c0876f7-kube-api-access-dl4nz" (OuterVolumeSpecName: "kube-api-access-dl4nz") pod "13e7455b-ca1c-475f-95af-c9813c0876f7" (UID: "13e7455b-ca1c-475f-95af-c9813c0876f7"). InnerVolumeSpecName "kube-api-access-dl4nz". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 12:50:40 crc kubenswrapper[5002]: I0930 12:50:40.567725 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/13e7455b-ca1c-475f-95af-c9813c0876f7-ssh-key-openstack-edpm-ipam" (OuterVolumeSpecName: "ssh-key-openstack-edpm-ipam") pod "13e7455b-ca1c-475f-95af-c9813c0876f7" (UID: "13e7455b-ca1c-475f-95af-c9813c0876f7"). InnerVolumeSpecName "ssh-key-openstack-edpm-ipam". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:50:40 crc kubenswrapper[5002]: I0930 12:50:40.575200 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/13e7455b-ca1c-475f-95af-c9813c0876f7-inventory-0" (OuterVolumeSpecName: "inventory-0") pod "13e7455b-ca1c-475f-95af-c9813c0876f7" (UID: "13e7455b-ca1c-475f-95af-c9813c0876f7"). InnerVolumeSpecName "inventory-0". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:50:40 crc kubenswrapper[5002]: I0930 12:50:40.641411 5002 reconciler_common.go:293] "Volume detached for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/13e7455b-ca1c-475f-95af-c9813c0876f7-inventory-0\") on node \"crc\" DevicePath \"\"" Sep 30 12:50:40 crc kubenswrapper[5002]: I0930 12:50:40.641446 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dl4nz\" (UniqueName: \"kubernetes.io/projected/13e7455b-ca1c-475f-95af-c9813c0876f7-kube-api-access-dl4nz\") on node \"crc\" DevicePath \"\"" Sep 30 12:50:40 crc kubenswrapper[5002]: I0930 12:50:40.641459 5002 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/13e7455b-ca1c-475f-95af-c9813c0876f7-ssh-key-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Sep 30 12:50:41 crc kubenswrapper[5002]: I0930 12:50:41.126703 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-w2tmz" event={"ID":"13e7455b-ca1c-475f-95af-c9813c0876f7","Type":"ContainerDied","Data":"e8019c4cb3f75fd1f0adc9435b76c4a9ea238e508620e2d6e64200dd3e1aa579"} Sep 30 12:50:41 crc kubenswrapper[5002]: I0930 12:50:41.126751 5002 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e8019c4cb3f75fd1f0adc9435b76c4a9ea238e508620e2d6e64200dd3e1aa579" Sep 30 12:50:41 crc kubenswrapper[5002]: I0930 12:50:41.126767 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-w2tmz" Sep 30 12:50:41 crc kubenswrapper[5002]: I0930 12:50:41.196073 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-f2n6c"] Sep 30 12:50:41 crc kubenswrapper[5002]: E0930 12:50:41.196541 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="13e7455b-ca1c-475f-95af-c9813c0876f7" containerName="ssh-known-hosts-edpm-deployment" Sep 30 12:50:41 crc kubenswrapper[5002]: I0930 12:50:41.196566 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="13e7455b-ca1c-475f-95af-c9813c0876f7" containerName="ssh-known-hosts-edpm-deployment" Sep 30 12:50:41 crc kubenswrapper[5002]: I0930 12:50:41.196787 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="13e7455b-ca1c-475f-95af-c9813c0876f7" containerName="ssh-known-hosts-edpm-deployment" Sep 30 12:50:41 crc kubenswrapper[5002]: I0930 12:50:41.197541 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-f2n6c" Sep 30 12:50:41 crc kubenswrapper[5002]: I0930 12:50:41.200055 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Sep 30 12:50:41 crc kubenswrapper[5002]: I0930 12:50:41.200301 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-ddhrk" Sep 30 12:50:41 crc kubenswrapper[5002]: I0930 12:50:41.201367 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 30 12:50:41 crc kubenswrapper[5002]: I0930 12:50:41.201580 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Sep 30 12:50:41 crc kubenswrapper[5002]: I0930 12:50:41.205442 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-f2n6c"] Sep 30 12:50:41 crc kubenswrapper[5002]: I0930 12:50:41.251132 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/0d552fc4-7e51-426b-99ff-9ed8753f4178-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-f2n6c\" (UID: \"0d552fc4-7e51-426b-99ff-9ed8753f4178\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-f2n6c" Sep 30 12:50:41 crc kubenswrapper[5002]: I0930 12:50:41.251268 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0d552fc4-7e51-426b-99ff-9ed8753f4178-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-f2n6c\" (UID: \"0d552fc4-7e51-426b-99ff-9ed8753f4178\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-f2n6c" Sep 30 12:50:41 crc kubenswrapper[5002]: I0930 12:50:41.251366 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kn8cf\" (UniqueName: \"kubernetes.io/projected/0d552fc4-7e51-426b-99ff-9ed8753f4178-kube-api-access-kn8cf\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-f2n6c\" (UID: \"0d552fc4-7e51-426b-99ff-9ed8753f4178\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-f2n6c" Sep 30 12:50:41 crc kubenswrapper[5002]: I0930 12:50:41.352858 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kn8cf\" (UniqueName: \"kubernetes.io/projected/0d552fc4-7e51-426b-99ff-9ed8753f4178-kube-api-access-kn8cf\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-f2n6c\" (UID: \"0d552fc4-7e51-426b-99ff-9ed8753f4178\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-f2n6c" Sep 30 12:50:41 crc kubenswrapper[5002]: I0930 12:50:41.352966 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/0d552fc4-7e51-426b-99ff-9ed8753f4178-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-f2n6c\" (UID: \"0d552fc4-7e51-426b-99ff-9ed8753f4178\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-f2n6c" Sep 30 12:50:41 crc kubenswrapper[5002]: I0930 12:50:41.353016 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0d552fc4-7e51-426b-99ff-9ed8753f4178-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-f2n6c\" (UID: \"0d552fc4-7e51-426b-99ff-9ed8753f4178\") " 
pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-f2n6c" Sep 30 12:50:41 crc kubenswrapper[5002]: I0930 12:50:41.357111 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/0d552fc4-7e51-426b-99ff-9ed8753f4178-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-f2n6c\" (UID: \"0d552fc4-7e51-426b-99ff-9ed8753f4178\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-f2n6c" Sep 30 12:50:41 crc kubenswrapper[5002]: I0930 12:50:41.365027 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0d552fc4-7e51-426b-99ff-9ed8753f4178-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-f2n6c\" (UID: \"0d552fc4-7e51-426b-99ff-9ed8753f4178\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-f2n6c" Sep 30 12:50:41 crc kubenswrapper[5002]: I0930 12:50:41.372131 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kn8cf\" (UniqueName: \"kubernetes.io/projected/0d552fc4-7e51-426b-99ff-9ed8753f4178-kube-api-access-kn8cf\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-f2n6c\" (UID: \"0d552fc4-7e51-426b-99ff-9ed8753f4178\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-f2n6c" Sep 30 12:50:41 crc kubenswrapper[5002]: I0930 12:50:41.513729 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-f2n6c" Sep 30 12:50:42 crc kubenswrapper[5002]: I0930 12:50:42.131639 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-f2n6c"] Sep 30 12:50:42 crc kubenswrapper[5002]: I0930 12:50:42.787224 5002 scope.go:117] "RemoveContainer" containerID="d39c28ef837a1a5c0ad1fbcf5c80102a2063ce44028c7f0f673ccbb8313d06b5" Sep 30 12:50:43 crc kubenswrapper[5002]: I0930 12:50:43.142228 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-f2n6c" event={"ID":"0d552fc4-7e51-426b-99ff-9ed8753f4178","Type":"ContainerStarted","Data":"b0362dbcf9f45f88ee04f2075e79bed9373c05b61deabba13cef5d5baf4a5c0f"} Sep 30 12:50:43 crc kubenswrapper[5002]: I0930 12:50:43.142268 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-f2n6c" event={"ID":"0d552fc4-7e51-426b-99ff-9ed8753f4178","Type":"ContainerStarted","Data":"1d4b17bd61298fc3998010337714133660232c0f5e322a5ac436b7931f0a8833"} Sep 30 12:50:43 crc kubenswrapper[5002]: I0930 12:50:43.163593 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-f2n6c" podStartSLOduration=1.624310709 podStartE2EDuration="2.163576593s" podCreationTimestamp="2025-09-30 12:50:41 +0000 UTC" firstStartedPulling="2025-09-30 12:50:42.132883539 +0000 UTC m=+1816.382565685" lastFinishedPulling="2025-09-30 12:50:42.672149423 +0000 UTC m=+1816.921831569" observedRunningTime="2025-09-30 12:50:43.158924184 +0000 UTC m=+1817.408606340" watchObservedRunningTime="2025-09-30 12:50:43.163576593 +0000 UTC m=+1817.413258739" Sep 30 12:50:51 crc kubenswrapper[5002]: I0930 12:50:51.205906 5002 generic.go:334] "Generic (PLEG): container finished" podID="0d552fc4-7e51-426b-99ff-9ed8753f4178" containerID="b0362dbcf9f45f88ee04f2075e79bed9373c05b61deabba13cef5d5baf4a5c0f" exitCode=0 Sep 30 12:50:51 crc kubenswrapper[5002]: I0930 12:50:51.206000 5002 kubelet.go:2453] "SyncLoop (PLEG): 
event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-f2n6c" event={"ID":"0d552fc4-7e51-426b-99ff-9ed8753f4178","Type":"ContainerDied","Data":"b0362dbcf9f45f88ee04f2075e79bed9373c05b61deabba13cef5d5baf4a5c0f"} Sep 30 12:50:52 crc kubenswrapper[5002]: I0930 12:50:52.606102 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-f2n6c" Sep 30 12:50:52 crc kubenswrapper[5002]: I0930 12:50:52.664113 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0d552fc4-7e51-426b-99ff-9ed8753f4178-inventory\") pod \"0d552fc4-7e51-426b-99ff-9ed8753f4178\" (UID: \"0d552fc4-7e51-426b-99ff-9ed8753f4178\") " Sep 30 12:50:52 crc kubenswrapper[5002]: I0930 12:50:52.664297 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/0d552fc4-7e51-426b-99ff-9ed8753f4178-ssh-key\") pod \"0d552fc4-7e51-426b-99ff-9ed8753f4178\" (UID: \"0d552fc4-7e51-426b-99ff-9ed8753f4178\") " Sep 30 12:50:52 crc kubenswrapper[5002]: I0930 12:50:52.664641 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kn8cf\" (UniqueName: \"kubernetes.io/projected/0d552fc4-7e51-426b-99ff-9ed8753f4178-kube-api-access-kn8cf\") pod \"0d552fc4-7e51-426b-99ff-9ed8753f4178\" (UID: \"0d552fc4-7e51-426b-99ff-9ed8753f4178\") " Sep 30 12:50:52 crc kubenswrapper[5002]: I0930 12:50:52.670083 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0d552fc4-7e51-426b-99ff-9ed8753f4178-kube-api-access-kn8cf" (OuterVolumeSpecName: "kube-api-access-kn8cf") pod "0d552fc4-7e51-426b-99ff-9ed8753f4178" (UID: "0d552fc4-7e51-426b-99ff-9ed8753f4178"). InnerVolumeSpecName "kube-api-access-kn8cf". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 12:50:52 crc kubenswrapper[5002]: I0930 12:50:52.693772 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0d552fc4-7e51-426b-99ff-9ed8753f4178-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "0d552fc4-7e51-426b-99ff-9ed8753f4178" (UID: "0d552fc4-7e51-426b-99ff-9ed8753f4178"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:50:52 crc kubenswrapper[5002]: I0930 12:50:52.699209 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0d552fc4-7e51-426b-99ff-9ed8753f4178-inventory" (OuterVolumeSpecName: "inventory") pod "0d552fc4-7e51-426b-99ff-9ed8753f4178" (UID: "0d552fc4-7e51-426b-99ff-9ed8753f4178"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:50:52 crc kubenswrapper[5002]: I0930 12:50:52.771028 5002 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0d552fc4-7e51-426b-99ff-9ed8753f4178-inventory\") on node \"crc\" DevicePath \"\"" Sep 30 12:50:52 crc kubenswrapper[5002]: I0930 12:50:52.771061 5002 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/0d552fc4-7e51-426b-99ff-9ed8753f4178-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 30 12:50:52 crc kubenswrapper[5002]: I0930 12:50:52.771096 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kn8cf\" (UniqueName: \"kubernetes.io/projected/0d552fc4-7e51-426b-99ff-9ed8753f4178-kube-api-access-kn8cf\") on node \"crc\" DevicePath \"\"" Sep 30 12:50:53 crc kubenswrapper[5002]: I0930 12:50:53.226465 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-f2n6c" event={"ID":"0d552fc4-7e51-426b-99ff-9ed8753f4178","Type":"ContainerDied","Data":"1d4b17bd61298fc3998010337714133660232c0f5e322a5ac436b7931f0a8833"} Sep 30 12:50:53 crc kubenswrapper[5002]: I0930 12:50:53.226522 5002 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1d4b17bd61298fc3998010337714133660232c0f5e322a5ac436b7931f0a8833" Sep 30 12:50:53 crc kubenswrapper[5002]: I0930 12:50:53.226605 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-f2n6c" Sep 30 12:50:53 crc kubenswrapper[5002]: I0930 12:50:53.292391 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-p8db7"] Sep 30 12:50:53 crc kubenswrapper[5002]: E0930 12:50:53.293129 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0d552fc4-7e51-426b-99ff-9ed8753f4178" containerName="run-os-edpm-deployment-openstack-edpm-ipam" Sep 30 12:50:53 crc kubenswrapper[5002]: I0930 12:50:53.293212 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="0d552fc4-7e51-426b-99ff-9ed8753f4178" containerName="run-os-edpm-deployment-openstack-edpm-ipam" Sep 30 12:50:53 crc kubenswrapper[5002]: I0930 12:50:53.293575 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="0d552fc4-7e51-426b-99ff-9ed8753f4178" containerName="run-os-edpm-deployment-openstack-edpm-ipam" Sep 30 12:50:53 crc kubenswrapper[5002]: I0930 12:50:53.294282 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-p8db7" Sep 30 12:50:53 crc kubenswrapper[5002]: I0930 12:50:53.296362 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Sep 30 12:50:53 crc kubenswrapper[5002]: I0930 12:50:53.297545 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 30 12:50:53 crc kubenswrapper[5002]: I0930 12:50:53.297961 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Sep 30 12:50:53 crc kubenswrapper[5002]: I0930 12:50:53.298207 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-ddhrk" Sep 30 12:50:53 crc kubenswrapper[5002]: I0930 12:50:53.308113 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-p8db7"] Sep 30 12:50:53 crc kubenswrapper[5002]: I0930 12:50:53.381942 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d6667a58-0566-4c47-8516-b46bed2a0f65-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-p8db7\" (UID: \"d6667a58-0566-4c47-8516-b46bed2a0f65\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-p8db7" Sep 30 12:50:53 crc kubenswrapper[5002]: I0930 12:50:53.382197 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d6667a58-0566-4c47-8516-b46bed2a0f65-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-p8db7\" (UID: \"d6667a58-0566-4c47-8516-b46bed2a0f65\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-p8db7" Sep 30 12:50:53 crc kubenswrapper[5002]: I0930 12:50:53.382288 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6m5x7\" (UniqueName: \"kubernetes.io/projected/d6667a58-0566-4c47-8516-b46bed2a0f65-kube-api-access-6m5x7\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-p8db7\" (UID: \"d6667a58-0566-4c47-8516-b46bed2a0f65\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-p8db7" Sep 30 12:50:53 crc kubenswrapper[5002]: I0930 12:50:53.484387 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6m5x7\" (UniqueName: \"kubernetes.io/projected/d6667a58-0566-4c47-8516-b46bed2a0f65-kube-api-access-6m5x7\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-p8db7\" (UID: \"d6667a58-0566-4c47-8516-b46bed2a0f65\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-p8db7" Sep 30 12:50:53 crc kubenswrapper[5002]: I0930 12:50:53.484525 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d6667a58-0566-4c47-8516-b46bed2a0f65-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-p8db7\" (UID: \"d6667a58-0566-4c47-8516-b46bed2a0f65\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-p8db7" Sep 30 12:50:53 crc kubenswrapper[5002]: I0930 12:50:53.484546 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d6667a58-0566-4c47-8516-b46bed2a0f65-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-p8db7\" (UID: 
\"d6667a58-0566-4c47-8516-b46bed2a0f65\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-p8db7" Sep 30 12:50:53 crc kubenswrapper[5002]: I0930 12:50:53.488958 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d6667a58-0566-4c47-8516-b46bed2a0f65-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-p8db7\" (UID: \"d6667a58-0566-4c47-8516-b46bed2a0f65\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-p8db7" Sep 30 12:50:53 crc kubenswrapper[5002]: I0930 12:50:53.489363 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d6667a58-0566-4c47-8516-b46bed2a0f65-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-p8db7\" (UID: \"d6667a58-0566-4c47-8516-b46bed2a0f65\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-p8db7" Sep 30 12:50:53 crc kubenswrapper[5002]: I0930 12:50:53.500407 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6m5x7\" (UniqueName: \"kubernetes.io/projected/d6667a58-0566-4c47-8516-b46bed2a0f65-kube-api-access-6m5x7\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-p8db7\" (UID: \"d6667a58-0566-4c47-8516-b46bed2a0f65\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-p8db7" Sep 30 12:50:53 crc kubenswrapper[5002]: I0930 12:50:53.620876 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-p8db7" Sep 30 12:50:54 crc kubenswrapper[5002]: I0930 12:50:54.128063 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-p8db7"] Sep 30 12:50:54 crc kubenswrapper[5002]: I0930 12:50:54.239456 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-p8db7" event={"ID":"d6667a58-0566-4c47-8516-b46bed2a0f65","Type":"ContainerStarted","Data":"ec39aa0eac2a9d4cdaf128f39648d9d9f269bf85c5c8e60bd5532c479d134cb3"} Sep 30 12:50:55 crc kubenswrapper[5002]: I0930 12:50:55.248914 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-p8db7" event={"ID":"d6667a58-0566-4c47-8516-b46bed2a0f65","Type":"ContainerStarted","Data":"ed014fa9605e2fbd831750b194bf1b166591a5c19f6031249d8ba9260fde5309"} Sep 30 12:50:55 crc kubenswrapper[5002]: I0930 12:50:55.271299 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-p8db7" podStartSLOduration=1.763505303 podStartE2EDuration="2.271281901s" podCreationTimestamp="2025-09-30 12:50:53 +0000 UTC" firstStartedPulling="2025-09-30 12:50:54.136913795 +0000 UTC m=+1828.386595941" lastFinishedPulling="2025-09-30 12:50:54.644690383 +0000 UTC m=+1828.894372539" observedRunningTime="2025-09-30 12:50:55.264059986 +0000 UTC m=+1829.513742152" watchObservedRunningTime="2025-09-30 12:50:55.271281901 +0000 UTC m=+1829.520964047" Sep 30 12:51:04 crc kubenswrapper[5002]: I0930 12:51:04.324371 5002 generic.go:334] "Generic (PLEG): container finished" podID="d6667a58-0566-4c47-8516-b46bed2a0f65" containerID="ed014fa9605e2fbd831750b194bf1b166591a5c19f6031249d8ba9260fde5309" exitCode=0 Sep 30 12:51:04 crc kubenswrapper[5002]: I0930 12:51:04.324410 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-p8db7" 
event={"ID":"d6667a58-0566-4c47-8516-b46bed2a0f65","Type":"ContainerDied","Data":"ed014fa9605e2fbd831750b194bf1b166591a5c19f6031249d8ba9260fde5309"} Sep 30 12:51:05 crc kubenswrapper[5002]: I0930 12:51:05.679725 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-p8db7" Sep 30 12:51:05 crc kubenswrapper[5002]: I0930 12:51:05.710553 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d6667a58-0566-4c47-8516-b46bed2a0f65-inventory\") pod \"d6667a58-0566-4c47-8516-b46bed2a0f65\" (UID: \"d6667a58-0566-4c47-8516-b46bed2a0f65\") " Sep 30 12:51:05 crc kubenswrapper[5002]: I0930 12:51:05.710781 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d6667a58-0566-4c47-8516-b46bed2a0f65-ssh-key\") pod \"d6667a58-0566-4c47-8516-b46bed2a0f65\" (UID: \"d6667a58-0566-4c47-8516-b46bed2a0f65\") " Sep 30 12:51:05 crc kubenswrapper[5002]: I0930 12:51:05.710918 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6m5x7\" (UniqueName: \"kubernetes.io/projected/d6667a58-0566-4c47-8516-b46bed2a0f65-kube-api-access-6m5x7\") pod \"d6667a58-0566-4c47-8516-b46bed2a0f65\" (UID: \"d6667a58-0566-4c47-8516-b46bed2a0f65\") " Sep 30 12:51:05 crc kubenswrapper[5002]: I0930 12:51:05.717115 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d6667a58-0566-4c47-8516-b46bed2a0f65-kube-api-access-6m5x7" (OuterVolumeSpecName: "kube-api-access-6m5x7") pod "d6667a58-0566-4c47-8516-b46bed2a0f65" (UID: "d6667a58-0566-4c47-8516-b46bed2a0f65"). InnerVolumeSpecName "kube-api-access-6m5x7". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 12:51:05 crc kubenswrapper[5002]: I0930 12:51:05.740693 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d6667a58-0566-4c47-8516-b46bed2a0f65-inventory" (OuterVolumeSpecName: "inventory") pod "d6667a58-0566-4c47-8516-b46bed2a0f65" (UID: "d6667a58-0566-4c47-8516-b46bed2a0f65"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:51:05 crc kubenswrapper[5002]: I0930 12:51:05.746852 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d6667a58-0566-4c47-8516-b46bed2a0f65-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "d6667a58-0566-4c47-8516-b46bed2a0f65" (UID: "d6667a58-0566-4c47-8516-b46bed2a0f65"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:51:05 crc kubenswrapper[5002]: I0930 12:51:05.813347 5002 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d6667a58-0566-4c47-8516-b46bed2a0f65-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 30 12:51:05 crc kubenswrapper[5002]: I0930 12:51:05.813718 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6m5x7\" (UniqueName: \"kubernetes.io/projected/d6667a58-0566-4c47-8516-b46bed2a0f65-kube-api-access-6m5x7\") on node \"crc\" DevicePath \"\"" Sep 30 12:51:05 crc kubenswrapper[5002]: I0930 12:51:05.813846 5002 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d6667a58-0566-4c47-8516-b46bed2a0f65-inventory\") on node \"crc\" DevicePath \"\"" Sep 30 12:51:06 crc kubenswrapper[5002]: I0930 12:51:06.344593 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-p8db7" event={"ID":"d6667a58-0566-4c47-8516-b46bed2a0f65","Type":"ContainerDied","Data":"ec39aa0eac2a9d4cdaf128f39648d9d9f269bf85c5c8e60bd5532c479d134cb3"} Sep 30 12:51:06 crc kubenswrapper[5002]: I0930 12:51:06.345076 5002 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ec39aa0eac2a9d4cdaf128f39648d9d9f269bf85c5c8e60bd5532c479d134cb3" Sep 30 12:51:06 crc kubenswrapper[5002]: I0930 12:51:06.344712 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-p8db7" Sep 30 12:51:06 crc kubenswrapper[5002]: I0930 12:51:06.439311 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/install-certs-edpm-deployment-openstack-edpm-ipam-7zdzw"] Sep 30 12:51:06 crc kubenswrapper[5002]: E0930 12:51:06.439883 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d6667a58-0566-4c47-8516-b46bed2a0f65" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Sep 30 12:51:06 crc kubenswrapper[5002]: I0930 12:51:06.439914 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="d6667a58-0566-4c47-8516-b46bed2a0f65" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Sep 30 12:51:06 crc kubenswrapper[5002]: I0930 12:51:06.440204 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="d6667a58-0566-4c47-8516-b46bed2a0f65" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Sep 30 12:51:06 crc kubenswrapper[5002]: I0930 12:51:06.440967 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-7zdzw" Sep 30 12:51:06 crc kubenswrapper[5002]: I0930 12:51:06.443168 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 30 12:51:06 crc kubenswrapper[5002]: I0930 12:51:06.443232 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-telemetry-default-certs-0" Sep 30 12:51:06 crc kubenswrapper[5002]: I0930 12:51:06.443255 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-ovn-default-certs-0" Sep 30 12:51:06 crc kubenswrapper[5002]: I0930 12:51:06.443392 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Sep 30 12:51:06 crc kubenswrapper[5002]: I0930 12:51:06.443628 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Sep 30 12:51:06 crc kubenswrapper[5002]: I0930 12:51:06.443714 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-neutron-metadata-default-certs-0" Sep 30 12:51:06 crc kubenswrapper[5002]: I0930 12:51:06.444133 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-libvirt-default-certs-0" Sep 30 12:51:06 crc kubenswrapper[5002]: I0930 12:51:06.447346 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-ddhrk" Sep 30 12:51:06 crc kubenswrapper[5002]: I0930 12:51:06.467381 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-certs-edpm-deployment-openstack-edpm-ipam-7zdzw"] Sep 30 12:51:06 crc kubenswrapper[5002]: I0930 12:51:06.533947 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/61c70258-1787-4522-810a-af2ac9e07703-inventory\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-7zdzw\" (UID: \"61c70258-1787-4522-810a-af2ac9e07703\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-7zdzw" Sep 30 12:51:06 crc kubenswrapper[5002]: I0930 12:51:06.534041 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/61c70258-1787-4522-810a-af2ac9e07703-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-7zdzw\" (UID: \"61c70258-1787-4522-810a-af2ac9e07703\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-7zdzw" Sep 30 12:51:06 crc kubenswrapper[5002]: I0930 12:51:06.534097 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/61c70258-1787-4522-810a-af2ac9e07703-telemetry-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-7zdzw\" (UID: \"61c70258-1787-4522-810a-af2ac9e07703\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-7zdzw" Sep 30 12:51:06 crc kubenswrapper[5002]: I0930 12:51:06.534131 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cghzh\" (UniqueName: \"kubernetes.io/projected/61c70258-1787-4522-810a-af2ac9e07703-kube-api-access-cghzh\") pod 
\"install-certs-edpm-deployment-openstack-edpm-ipam-7zdzw\" (UID: \"61c70258-1787-4522-810a-af2ac9e07703\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-7zdzw" Sep 30 12:51:06 crc kubenswrapper[5002]: I0930 12:51:06.534171 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/61c70258-1787-4522-810a-af2ac9e07703-ssh-key\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-7zdzw\" (UID: \"61c70258-1787-4522-810a-af2ac9e07703\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-7zdzw" Sep 30 12:51:06 crc kubenswrapper[5002]: I0930 12:51:06.534200 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/61c70258-1787-4522-810a-af2ac9e07703-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-7zdzw\" (UID: \"61c70258-1787-4522-810a-af2ac9e07703\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-7zdzw" Sep 30 12:51:06 crc kubenswrapper[5002]: I0930 12:51:06.534234 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/61c70258-1787-4522-810a-af2ac9e07703-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-7zdzw\" (UID: \"61c70258-1787-4522-810a-af2ac9e07703\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-7zdzw" Sep 30 12:51:06 crc kubenswrapper[5002]: I0930 12:51:06.534285 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/61c70258-1787-4522-810a-af2ac9e07703-repo-setup-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-7zdzw\" (UID: \"61c70258-1787-4522-810a-af2ac9e07703\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-7zdzw" Sep 30 12:51:06 crc kubenswrapper[5002]: I0930 12:51:06.534402 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/61c70258-1787-4522-810a-af2ac9e07703-bootstrap-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-7zdzw\" (UID: \"61c70258-1787-4522-810a-af2ac9e07703\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-7zdzw" Sep 30 12:51:06 crc kubenswrapper[5002]: I0930 12:51:06.534452 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/61c70258-1787-4522-810a-af2ac9e07703-libvirt-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-7zdzw\" (UID: \"61c70258-1787-4522-810a-af2ac9e07703\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-7zdzw" Sep 30 12:51:06 crc kubenswrapper[5002]: I0930 12:51:06.534482 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/61c70258-1787-4522-810a-af2ac9e07703-neutron-metadata-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-7zdzw\" (UID: 
\"61c70258-1787-4522-810a-af2ac9e07703\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-7zdzw" Sep 30 12:51:06 crc kubenswrapper[5002]: I0930 12:51:06.534511 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/61c70258-1787-4522-810a-af2ac9e07703-nova-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-7zdzw\" (UID: \"61c70258-1787-4522-810a-af2ac9e07703\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-7zdzw" Sep 30 12:51:06 crc kubenswrapper[5002]: I0930 12:51:06.534700 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/61c70258-1787-4522-810a-af2ac9e07703-ovn-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-7zdzw\" (UID: \"61c70258-1787-4522-810a-af2ac9e07703\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-7zdzw" Sep 30 12:51:06 crc kubenswrapper[5002]: I0930 12:51:06.534875 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/61c70258-1787-4522-810a-af2ac9e07703-openstack-edpm-ipam-ovn-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-7zdzw\" (UID: \"61c70258-1787-4522-810a-af2ac9e07703\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-7zdzw" Sep 30 12:51:06 crc kubenswrapper[5002]: I0930 12:51:06.636527 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/61c70258-1787-4522-810a-af2ac9e07703-bootstrap-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-7zdzw\" (UID: \"61c70258-1787-4522-810a-af2ac9e07703\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-7zdzw" Sep 30 12:51:06 crc kubenswrapper[5002]: I0930 12:51:06.636614 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/61c70258-1787-4522-810a-af2ac9e07703-libvirt-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-7zdzw\" (UID: \"61c70258-1787-4522-810a-af2ac9e07703\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-7zdzw" Sep 30 12:51:06 crc kubenswrapper[5002]: I0930 12:51:06.636635 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/61c70258-1787-4522-810a-af2ac9e07703-neutron-metadata-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-7zdzw\" (UID: \"61c70258-1787-4522-810a-af2ac9e07703\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-7zdzw" Sep 30 12:51:06 crc kubenswrapper[5002]: I0930 12:51:06.636664 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/61c70258-1787-4522-810a-af2ac9e07703-nova-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-7zdzw\" (UID: \"61c70258-1787-4522-810a-af2ac9e07703\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-7zdzw" Sep 30 12:51:06 crc kubenswrapper[5002]: I0930 12:51:06.636700 5002 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/61c70258-1787-4522-810a-af2ac9e07703-ovn-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-7zdzw\" (UID: \"61c70258-1787-4522-810a-af2ac9e07703\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-7zdzw" Sep 30 12:51:06 crc kubenswrapper[5002]: I0930 12:51:06.636728 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/61c70258-1787-4522-810a-af2ac9e07703-openstack-edpm-ipam-ovn-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-7zdzw\" (UID: \"61c70258-1787-4522-810a-af2ac9e07703\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-7zdzw" Sep 30 12:51:06 crc kubenswrapper[5002]: I0930 12:51:06.636760 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/61c70258-1787-4522-810a-af2ac9e07703-inventory\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-7zdzw\" (UID: \"61c70258-1787-4522-810a-af2ac9e07703\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-7zdzw" Sep 30 12:51:06 crc kubenswrapper[5002]: I0930 12:51:06.636788 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/61c70258-1787-4522-810a-af2ac9e07703-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-7zdzw\" (UID: \"61c70258-1787-4522-810a-af2ac9e07703\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-7zdzw" Sep 30 12:51:06 crc kubenswrapper[5002]: I0930 12:51:06.636823 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/61c70258-1787-4522-810a-af2ac9e07703-telemetry-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-7zdzw\" (UID: \"61c70258-1787-4522-810a-af2ac9e07703\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-7zdzw" Sep 30 12:51:06 crc kubenswrapper[5002]: I0930 12:51:06.636848 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cghzh\" (UniqueName: \"kubernetes.io/projected/61c70258-1787-4522-810a-af2ac9e07703-kube-api-access-cghzh\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-7zdzw\" (UID: \"61c70258-1787-4522-810a-af2ac9e07703\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-7zdzw" Sep 30 12:51:06 crc kubenswrapper[5002]: I0930 12:51:06.636876 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/61c70258-1787-4522-810a-af2ac9e07703-ssh-key\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-7zdzw\" (UID: \"61c70258-1787-4522-810a-af2ac9e07703\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-7zdzw" Sep 30 12:51:06 crc kubenswrapper[5002]: I0930 12:51:06.636893 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/61c70258-1787-4522-810a-af2ac9e07703-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-7zdzw\" (UID: 
\"61c70258-1787-4522-810a-af2ac9e07703\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-7zdzw" Sep 30 12:51:06 crc kubenswrapper[5002]: I0930 12:51:06.636918 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/61c70258-1787-4522-810a-af2ac9e07703-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-7zdzw\" (UID: \"61c70258-1787-4522-810a-af2ac9e07703\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-7zdzw" Sep 30 12:51:06 crc kubenswrapper[5002]: I0930 12:51:06.636955 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/61c70258-1787-4522-810a-af2ac9e07703-repo-setup-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-7zdzw\" (UID: \"61c70258-1787-4522-810a-af2ac9e07703\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-7zdzw" Sep 30 12:51:06 crc kubenswrapper[5002]: I0930 12:51:06.640546 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/61c70258-1787-4522-810a-af2ac9e07703-bootstrap-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-7zdzw\" (UID: \"61c70258-1787-4522-810a-af2ac9e07703\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-7zdzw" Sep 30 12:51:06 crc kubenswrapper[5002]: I0930 12:51:06.640634 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/61c70258-1787-4522-810a-af2ac9e07703-libvirt-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-7zdzw\" (UID: \"61c70258-1787-4522-810a-af2ac9e07703\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-7zdzw" Sep 30 12:51:06 crc kubenswrapper[5002]: I0930 12:51:06.640943 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/61c70258-1787-4522-810a-af2ac9e07703-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-7zdzw\" (UID: \"61c70258-1787-4522-810a-af2ac9e07703\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-7zdzw" Sep 30 12:51:06 crc kubenswrapper[5002]: I0930 12:51:06.641309 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/61c70258-1787-4522-810a-af2ac9e07703-repo-setup-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-7zdzw\" (UID: \"61c70258-1787-4522-810a-af2ac9e07703\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-7zdzw" Sep 30 12:51:06 crc kubenswrapper[5002]: I0930 12:51:06.641696 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/61c70258-1787-4522-810a-af2ac9e07703-neutron-metadata-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-7zdzw\" (UID: \"61c70258-1787-4522-810a-af2ac9e07703\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-7zdzw" Sep 30 12:51:06 crc kubenswrapper[5002]: I0930 12:51:06.641945 5002 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/61c70258-1787-4522-810a-af2ac9e07703-openstack-edpm-ipam-ovn-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-7zdzw\" (UID: \"61c70258-1787-4522-810a-af2ac9e07703\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-7zdzw" Sep 30 12:51:06 crc kubenswrapper[5002]: I0930 12:51:06.642259 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/61c70258-1787-4522-810a-af2ac9e07703-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-7zdzw\" (UID: \"61c70258-1787-4522-810a-af2ac9e07703\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-7zdzw" Sep 30 12:51:06 crc kubenswrapper[5002]: I0930 12:51:06.642717 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/61c70258-1787-4522-810a-af2ac9e07703-inventory\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-7zdzw\" (UID: \"61c70258-1787-4522-810a-af2ac9e07703\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-7zdzw" Sep 30 12:51:06 crc kubenswrapper[5002]: I0930 12:51:06.643094 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/61c70258-1787-4522-810a-af2ac9e07703-telemetry-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-7zdzw\" (UID: \"61c70258-1787-4522-810a-af2ac9e07703\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-7zdzw" Sep 30 12:51:06 crc kubenswrapper[5002]: I0930 12:51:06.643528 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/61c70258-1787-4522-810a-af2ac9e07703-ssh-key\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-7zdzw\" (UID: \"61c70258-1787-4522-810a-af2ac9e07703\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-7zdzw" Sep 30 12:51:06 crc kubenswrapper[5002]: I0930 12:51:06.644278 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/61c70258-1787-4522-810a-af2ac9e07703-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-7zdzw\" (UID: \"61c70258-1787-4522-810a-af2ac9e07703\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-7zdzw" Sep 30 12:51:06 crc kubenswrapper[5002]: I0930 12:51:06.644544 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/61c70258-1787-4522-810a-af2ac9e07703-nova-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-7zdzw\" (UID: \"61c70258-1787-4522-810a-af2ac9e07703\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-7zdzw" Sep 30 12:51:06 crc kubenswrapper[5002]: I0930 12:51:06.645101 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/61c70258-1787-4522-810a-af2ac9e07703-ovn-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-7zdzw\" (UID: \"61c70258-1787-4522-810a-af2ac9e07703\") " 
pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-7zdzw" Sep 30 12:51:06 crc kubenswrapper[5002]: I0930 12:51:06.657128 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cghzh\" (UniqueName: \"kubernetes.io/projected/61c70258-1787-4522-810a-af2ac9e07703-kube-api-access-cghzh\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-7zdzw\" (UID: \"61c70258-1787-4522-810a-af2ac9e07703\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-7zdzw" Sep 30 12:51:06 crc kubenswrapper[5002]: I0930 12:51:06.758747 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-7zdzw" Sep 30 12:51:07 crc kubenswrapper[5002]: I0930 12:51:07.223695 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-certs-edpm-deployment-openstack-edpm-ipam-7zdzw"] Sep 30 12:51:07 crc kubenswrapper[5002]: I0930 12:51:07.353061 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-7zdzw" event={"ID":"61c70258-1787-4522-810a-af2ac9e07703","Type":"ContainerStarted","Data":"a4eca92e48fbf86bb1db06cf392916395c88dadd7a8038eafe75b37c307e55d4"} Sep 30 12:51:08 crc kubenswrapper[5002]: I0930 12:51:08.373661 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-7zdzw" event={"ID":"61c70258-1787-4522-810a-af2ac9e07703","Type":"ContainerStarted","Data":"925eae764e3aa4338e0031566eb9b7528ac254eda6ac84a7d459d9491d30c994"} Sep 30 12:51:08 crc kubenswrapper[5002]: I0930 12:51:08.396617 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-7zdzw" podStartSLOduration=1.9236450010000001 podStartE2EDuration="2.396601358s" podCreationTimestamp="2025-09-30 12:51:06 +0000 UTC" firstStartedPulling="2025-09-30 12:51:07.229667619 +0000 UTC m=+1841.479349765" lastFinishedPulling="2025-09-30 12:51:07.702623976 +0000 UTC m=+1841.952306122" observedRunningTime="2025-09-30 12:51:08.393861858 +0000 UTC m=+1842.643544024" watchObservedRunningTime="2025-09-30 12:51:08.396601358 +0000 UTC m=+1842.646283504" Sep 30 12:51:44 crc kubenswrapper[5002]: I0930 12:51:44.707388 5002 generic.go:334] "Generic (PLEG): container finished" podID="61c70258-1787-4522-810a-af2ac9e07703" containerID="925eae764e3aa4338e0031566eb9b7528ac254eda6ac84a7d459d9491d30c994" exitCode=0 Sep 30 12:51:44 crc kubenswrapper[5002]: I0930 12:51:44.707552 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-7zdzw" event={"ID":"61c70258-1787-4522-810a-af2ac9e07703","Type":"ContainerDied","Data":"925eae764e3aa4338e0031566eb9b7528ac254eda6ac84a7d459d9491d30c994"} Sep 30 12:51:46 crc kubenswrapper[5002]: I0930 12:51:46.097004 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-7zdzw" Sep 30 12:51:46 crc kubenswrapper[5002]: I0930 12:51:46.265188 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/61c70258-1787-4522-810a-af2ac9e07703-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"61c70258-1787-4522-810a-af2ac9e07703\" (UID: \"61c70258-1787-4522-810a-af2ac9e07703\") " Sep 30 12:51:46 crc kubenswrapper[5002]: I0930 12:51:46.265272 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/61c70258-1787-4522-810a-af2ac9e07703-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"61c70258-1787-4522-810a-af2ac9e07703\" (UID: \"61c70258-1787-4522-810a-af2ac9e07703\") " Sep 30 12:51:46 crc kubenswrapper[5002]: I0930 12:51:46.265360 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/61c70258-1787-4522-810a-af2ac9e07703-repo-setup-combined-ca-bundle\") pod \"61c70258-1787-4522-810a-af2ac9e07703\" (UID: \"61c70258-1787-4522-810a-af2ac9e07703\") " Sep 30 12:51:46 crc kubenswrapper[5002]: I0930 12:51:46.265394 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/61c70258-1787-4522-810a-af2ac9e07703-openstack-edpm-ipam-ovn-default-certs-0\") pod \"61c70258-1787-4522-810a-af2ac9e07703\" (UID: \"61c70258-1787-4522-810a-af2ac9e07703\") " Sep 30 12:51:46 crc kubenswrapper[5002]: I0930 12:51:46.265434 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/61c70258-1787-4522-810a-af2ac9e07703-inventory\") pod \"61c70258-1787-4522-810a-af2ac9e07703\" (UID: \"61c70258-1787-4522-810a-af2ac9e07703\") " Sep 30 12:51:46 crc kubenswrapper[5002]: I0930 12:51:46.265524 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/61c70258-1787-4522-810a-af2ac9e07703-libvirt-combined-ca-bundle\") pod \"61c70258-1787-4522-810a-af2ac9e07703\" (UID: \"61c70258-1787-4522-810a-af2ac9e07703\") " Sep 30 12:51:46 crc kubenswrapper[5002]: I0930 12:51:46.265572 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cghzh\" (UniqueName: \"kubernetes.io/projected/61c70258-1787-4522-810a-af2ac9e07703-kube-api-access-cghzh\") pod \"61c70258-1787-4522-810a-af2ac9e07703\" (UID: \"61c70258-1787-4522-810a-af2ac9e07703\") " Sep 30 12:51:46 crc kubenswrapper[5002]: I0930 12:51:46.265607 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/61c70258-1787-4522-810a-af2ac9e07703-ssh-key\") pod \"61c70258-1787-4522-810a-af2ac9e07703\" (UID: \"61c70258-1787-4522-810a-af2ac9e07703\") " Sep 30 12:51:46 crc kubenswrapper[5002]: I0930 12:51:46.265641 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/61c70258-1787-4522-810a-af2ac9e07703-nova-combined-ca-bundle\") pod \"61c70258-1787-4522-810a-af2ac9e07703\" (UID: \"61c70258-1787-4522-810a-af2ac9e07703\") " Sep 30 12:51:46 crc 
kubenswrapper[5002]: I0930 12:51:46.265683 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/61c70258-1787-4522-810a-af2ac9e07703-bootstrap-combined-ca-bundle\") pod \"61c70258-1787-4522-810a-af2ac9e07703\" (UID: \"61c70258-1787-4522-810a-af2ac9e07703\") " Sep 30 12:51:46 crc kubenswrapper[5002]: I0930 12:51:46.265740 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/61c70258-1787-4522-810a-af2ac9e07703-neutron-metadata-combined-ca-bundle\") pod \"61c70258-1787-4522-810a-af2ac9e07703\" (UID: \"61c70258-1787-4522-810a-af2ac9e07703\") " Sep 30 12:51:46 crc kubenswrapper[5002]: I0930 12:51:46.265841 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/61c70258-1787-4522-810a-af2ac9e07703-telemetry-combined-ca-bundle\") pod \"61c70258-1787-4522-810a-af2ac9e07703\" (UID: \"61c70258-1787-4522-810a-af2ac9e07703\") " Sep 30 12:51:46 crc kubenswrapper[5002]: I0930 12:51:46.265875 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/61c70258-1787-4522-810a-af2ac9e07703-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"61c70258-1787-4522-810a-af2ac9e07703\" (UID: \"61c70258-1787-4522-810a-af2ac9e07703\") " Sep 30 12:51:46 crc kubenswrapper[5002]: I0930 12:51:46.265962 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/61c70258-1787-4522-810a-af2ac9e07703-ovn-combined-ca-bundle\") pod \"61c70258-1787-4522-810a-af2ac9e07703\" (UID: \"61c70258-1787-4522-810a-af2ac9e07703\") " Sep 30 12:51:46 crc kubenswrapper[5002]: I0930 12:51:46.274264 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/61c70258-1787-4522-810a-af2ac9e07703-libvirt-combined-ca-bundle" (OuterVolumeSpecName: "libvirt-combined-ca-bundle") pod "61c70258-1787-4522-810a-af2ac9e07703" (UID: "61c70258-1787-4522-810a-af2ac9e07703"). InnerVolumeSpecName "libvirt-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:51:46 crc kubenswrapper[5002]: I0930 12:51:46.274704 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/61c70258-1787-4522-810a-af2ac9e07703-ovn-combined-ca-bundle" (OuterVolumeSpecName: "ovn-combined-ca-bundle") pod "61c70258-1787-4522-810a-af2ac9e07703" (UID: "61c70258-1787-4522-810a-af2ac9e07703"). InnerVolumeSpecName "ovn-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:51:46 crc kubenswrapper[5002]: I0930 12:51:46.274758 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/61c70258-1787-4522-810a-af2ac9e07703-nova-combined-ca-bundle" (OuterVolumeSpecName: "nova-combined-ca-bundle") pod "61c70258-1787-4522-810a-af2ac9e07703" (UID: "61c70258-1787-4522-810a-af2ac9e07703"). InnerVolumeSpecName "nova-combined-ca-bundle". 
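
The teardown entries here mirror the mount flow recorded at 12:51:06: VerifyControllerAttachedVolume and MountVolume.SetUp on the way up, UnmountVolume.TearDown and "Volume detached" on the way down. OuterVolumeSpecName is the name the pod spec uses and InnerVolumeSpecName is the plugin-resolved name; for these secret and projected volumes the two coincide. A small Go sketch that tallies this lifecycle per volume from kubelet log text on stdin; the patterns encode only the exact message shapes visible in this log (structured klog entries escape their inner quotes, the TearDown message does not), so treat it as illustrative rather than a general journal parser:

package main

import (
	"bufio"
	"fmt"
	"os"
	"regexp"
)

// Tally each volume's last observed lifecycle phase. Patterns are checked in
// mount -> teardown -> detach order so a line carrying several entries still
// resolves deterministically.
var phases = []struct {
	name string
	re   *regexp.Regexp
}{
	{"mounted", regexp.MustCompile(`MountVolume\.SetUp succeeded for volume \\"([^"\\]+)\\"`)},
	{"torn down", regexp.MustCompile(`\(OuterVolumeSpecName: "([^"]+)"\)`)},
	{"detached", regexp.MustCompile(`Volume detached for volume \\"([^"\\]+)\\"`)},
}

func main() {
	state := map[string]string{}
	sc := bufio.NewScanner(os.Stdin)
	sc.Buffer(make([]byte, 0, 1<<20), 1<<20) // journal lines can be very long
	for sc.Scan() {
		for _, p := range phases {
			for _, m := range p.re.FindAllStringSubmatch(sc.Text(), -1) {
				state[m[1]] = p.name
			}
		}
	}
	for vol, phase := range state {
		fmt.Printf("%-55s %s\n", vol, phase) // every volume should end "detached"
	}
}
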
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:51:46 crc kubenswrapper[5002]: I0930 12:51:46.275200 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/61c70258-1787-4522-810a-af2ac9e07703-bootstrap-combined-ca-bundle" (OuterVolumeSpecName: "bootstrap-combined-ca-bundle") pod "61c70258-1787-4522-810a-af2ac9e07703" (UID: "61c70258-1787-4522-810a-af2ac9e07703"). InnerVolumeSpecName "bootstrap-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:51:46 crc kubenswrapper[5002]: I0930 12:51:46.276353 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/61c70258-1787-4522-810a-af2ac9e07703-kube-api-access-cghzh" (OuterVolumeSpecName: "kube-api-access-cghzh") pod "61c70258-1787-4522-810a-af2ac9e07703" (UID: "61c70258-1787-4522-810a-af2ac9e07703"). InnerVolumeSpecName "kube-api-access-cghzh". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 12:51:46 crc kubenswrapper[5002]: I0930 12:51:46.276779 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/61c70258-1787-4522-810a-af2ac9e07703-repo-setup-combined-ca-bundle" (OuterVolumeSpecName: "repo-setup-combined-ca-bundle") pod "61c70258-1787-4522-810a-af2ac9e07703" (UID: "61c70258-1787-4522-810a-af2ac9e07703"). InnerVolumeSpecName "repo-setup-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:51:46 crc kubenswrapper[5002]: I0930 12:51:46.276868 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/61c70258-1787-4522-810a-af2ac9e07703-neutron-metadata-combined-ca-bundle" (OuterVolumeSpecName: "neutron-metadata-combined-ca-bundle") pod "61c70258-1787-4522-810a-af2ac9e07703" (UID: "61c70258-1787-4522-810a-af2ac9e07703"). InnerVolumeSpecName "neutron-metadata-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:51:46 crc kubenswrapper[5002]: I0930 12:51:46.277399 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/61c70258-1787-4522-810a-af2ac9e07703-openstack-edpm-ipam-telemetry-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-telemetry-default-certs-0") pod "61c70258-1787-4522-810a-af2ac9e07703" (UID: "61c70258-1787-4522-810a-af2ac9e07703"). InnerVolumeSpecName "openstack-edpm-ipam-telemetry-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 12:51:46 crc kubenswrapper[5002]: I0930 12:51:46.277574 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/61c70258-1787-4522-810a-af2ac9e07703-openstack-edpm-ipam-ovn-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-ovn-default-certs-0") pod "61c70258-1787-4522-810a-af2ac9e07703" (UID: "61c70258-1787-4522-810a-af2ac9e07703"). InnerVolumeSpecName "openstack-edpm-ipam-ovn-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 12:51:46 crc kubenswrapper[5002]: I0930 12:51:46.278361 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/61c70258-1787-4522-810a-af2ac9e07703-openstack-edpm-ipam-neutron-metadata-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-neutron-metadata-default-certs-0") pod "61c70258-1787-4522-810a-af2ac9e07703" (UID: "61c70258-1787-4522-810a-af2ac9e07703"). InnerVolumeSpecName "openstack-edpm-ipam-neutron-metadata-default-certs-0". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 12:51:46 crc kubenswrapper[5002]: I0930 12:51:46.280010 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/61c70258-1787-4522-810a-af2ac9e07703-openstack-edpm-ipam-libvirt-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-libvirt-default-certs-0") pod "61c70258-1787-4522-810a-af2ac9e07703" (UID: "61c70258-1787-4522-810a-af2ac9e07703"). InnerVolumeSpecName "openstack-edpm-ipam-libvirt-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 12:51:46 crc kubenswrapper[5002]: I0930 12:51:46.280679 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/61c70258-1787-4522-810a-af2ac9e07703-telemetry-combined-ca-bundle" (OuterVolumeSpecName: "telemetry-combined-ca-bundle") pod "61c70258-1787-4522-810a-af2ac9e07703" (UID: "61c70258-1787-4522-810a-af2ac9e07703"). InnerVolumeSpecName "telemetry-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:51:46 crc kubenswrapper[5002]: I0930 12:51:46.308526 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/61c70258-1787-4522-810a-af2ac9e07703-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "61c70258-1787-4522-810a-af2ac9e07703" (UID: "61c70258-1787-4522-810a-af2ac9e07703"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:51:46 crc kubenswrapper[5002]: I0930 12:51:46.308655 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/61c70258-1787-4522-810a-af2ac9e07703-inventory" (OuterVolumeSpecName: "inventory") pod "61c70258-1787-4522-810a-af2ac9e07703" (UID: "61c70258-1787-4522-810a-af2ac9e07703"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:51:46 crc kubenswrapper[5002]: I0930 12:51:46.368818 5002 reconciler_common.go:293] "Volume detached for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/61c70258-1787-4522-810a-af2ac9e07703-ovn-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 12:51:46 crc kubenswrapper[5002]: I0930 12:51:46.368888 5002 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/61c70258-1787-4522-810a-af2ac9e07703-openstack-edpm-ipam-neutron-metadata-default-certs-0\") on node \"crc\" DevicePath \"\"" Sep 30 12:51:46 crc kubenswrapper[5002]: I0930 12:51:46.368905 5002 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/61c70258-1787-4522-810a-af2ac9e07703-openstack-edpm-ipam-telemetry-default-certs-0\") on node \"crc\" DevicePath \"\"" Sep 30 12:51:46 crc kubenswrapper[5002]: I0930 12:51:46.368921 5002 reconciler_common.go:293] "Volume detached for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/61c70258-1787-4522-810a-af2ac9e07703-repo-setup-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 12:51:46 crc kubenswrapper[5002]: I0930 12:51:46.368935 5002 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/61c70258-1787-4522-810a-af2ac9e07703-openstack-edpm-ipam-ovn-default-certs-0\") on node \"crc\" DevicePath \"\"" Sep 30 12:51:46 crc kubenswrapper[5002]: I0930 12:51:46.368948 5002 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/61c70258-1787-4522-810a-af2ac9e07703-inventory\") on node \"crc\" DevicePath \"\"" Sep 30 12:51:46 crc kubenswrapper[5002]: I0930 12:51:46.368960 5002 reconciler_common.go:293] "Volume detached for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/61c70258-1787-4522-810a-af2ac9e07703-libvirt-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 12:51:46 crc kubenswrapper[5002]: I0930 12:51:46.368978 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cghzh\" (UniqueName: \"kubernetes.io/projected/61c70258-1787-4522-810a-af2ac9e07703-kube-api-access-cghzh\") on node \"crc\" DevicePath \"\"" Sep 30 12:51:46 crc kubenswrapper[5002]: I0930 12:51:46.368988 5002 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/61c70258-1787-4522-810a-af2ac9e07703-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 30 12:51:46 crc kubenswrapper[5002]: I0930 12:51:46.368998 5002 reconciler_common.go:293] "Volume detached for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/61c70258-1787-4522-810a-af2ac9e07703-nova-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 12:51:46 crc kubenswrapper[5002]: I0930 12:51:46.369008 5002 reconciler_common.go:293] "Volume detached for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/61c70258-1787-4522-810a-af2ac9e07703-bootstrap-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 12:51:46 crc kubenswrapper[5002]: I0930 12:51:46.369023 5002 reconciler_common.go:293] "Volume detached for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/61c70258-1787-4522-810a-af2ac9e07703-neutron-metadata-combined-ca-bundle\") on 
node \"crc\" DevicePath \"\"" Sep 30 12:51:46 crc kubenswrapper[5002]: I0930 12:51:46.369035 5002 reconciler_common.go:293] "Volume detached for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/61c70258-1787-4522-810a-af2ac9e07703-telemetry-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 12:51:46 crc kubenswrapper[5002]: I0930 12:51:46.369050 5002 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/61c70258-1787-4522-810a-af2ac9e07703-openstack-edpm-ipam-libvirt-default-certs-0\") on node \"crc\" DevicePath \"\"" Sep 30 12:51:46 crc kubenswrapper[5002]: I0930 12:51:46.729523 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-7zdzw" event={"ID":"61c70258-1787-4522-810a-af2ac9e07703","Type":"ContainerDied","Data":"a4eca92e48fbf86bb1db06cf392916395c88dadd7a8038eafe75b37c307e55d4"} Sep 30 12:51:46 crc kubenswrapper[5002]: I0930 12:51:46.729579 5002 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a4eca92e48fbf86bb1db06cf392916395c88dadd7a8038eafe75b37c307e55d4" Sep 30 12:51:46 crc kubenswrapper[5002]: I0930 12:51:46.729651 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-7zdzw" Sep 30 12:51:46 crc kubenswrapper[5002]: I0930 12:51:46.822613 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-4rc8x"] Sep 30 12:51:46 crc kubenswrapper[5002]: E0930 12:51:46.822991 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="61c70258-1787-4522-810a-af2ac9e07703" containerName="install-certs-edpm-deployment-openstack-edpm-ipam" Sep 30 12:51:46 crc kubenswrapper[5002]: I0930 12:51:46.823010 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="61c70258-1787-4522-810a-af2ac9e07703" containerName="install-certs-edpm-deployment-openstack-edpm-ipam" Sep 30 12:51:46 crc kubenswrapper[5002]: I0930 12:51:46.823220 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="61c70258-1787-4522-810a-af2ac9e07703" containerName="install-certs-edpm-deployment-openstack-edpm-ipam" Sep 30 12:51:46 crc kubenswrapper[5002]: I0930 12:51:46.823884 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-4rc8x" Sep 30 12:51:46 crc kubenswrapper[5002]: I0930 12:51:46.825696 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Sep 30 12:51:46 crc kubenswrapper[5002]: I0930 12:51:46.826292 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-ddhrk" Sep 30 12:51:46 crc kubenswrapper[5002]: I0930 12:51:46.829747 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 30 12:51:46 crc kubenswrapper[5002]: I0930 12:51:46.829597 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-config" Sep 30 12:51:46 crc kubenswrapper[5002]: I0930 12:51:46.830814 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Sep 30 12:51:46 crc kubenswrapper[5002]: I0930 12:51:46.850051 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-4rc8x"] Sep 30 12:51:46 crc kubenswrapper[5002]: I0930 12:51:46.982382 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/71bf5034-6600-47c8-ad11-2855276e1356-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-4rc8x\" (UID: \"71bf5034-6600-47c8-ad11-2855276e1356\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-4rc8x" Sep 30 12:51:46 crc kubenswrapper[5002]: I0930 12:51:46.982427 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/71bf5034-6600-47c8-ad11-2855276e1356-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-4rc8x\" (UID: \"71bf5034-6600-47c8-ad11-2855276e1356\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-4rc8x" Sep 30 12:51:46 crc kubenswrapper[5002]: I0930 12:51:46.982722 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jl6sh\" (UniqueName: \"kubernetes.io/projected/71bf5034-6600-47c8-ad11-2855276e1356-kube-api-access-jl6sh\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-4rc8x\" (UID: \"71bf5034-6600-47c8-ad11-2855276e1356\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-4rc8x" Sep 30 12:51:46 crc kubenswrapper[5002]: I0930 12:51:46.982871 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/71bf5034-6600-47c8-ad11-2855276e1356-ssh-key\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-4rc8x\" (UID: \"71bf5034-6600-47c8-ad11-2855276e1356\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-4rc8x" Sep 30 12:51:46 crc kubenswrapper[5002]: I0930 12:51:46.982947 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/71bf5034-6600-47c8-ad11-2855276e1356-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-4rc8x\" (UID: \"71bf5034-6600-47c8-ad11-2855276e1356\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-4rc8x" Sep 30 12:51:47 crc kubenswrapper[5002]: I0930 12:51:47.084851 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jl6sh\" 
(UniqueName: \"kubernetes.io/projected/71bf5034-6600-47c8-ad11-2855276e1356-kube-api-access-jl6sh\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-4rc8x\" (UID: \"71bf5034-6600-47c8-ad11-2855276e1356\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-4rc8x" Sep 30 12:51:47 crc kubenswrapper[5002]: I0930 12:51:47.084984 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/71bf5034-6600-47c8-ad11-2855276e1356-ssh-key\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-4rc8x\" (UID: \"71bf5034-6600-47c8-ad11-2855276e1356\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-4rc8x" Sep 30 12:51:47 crc kubenswrapper[5002]: I0930 12:51:47.085064 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/71bf5034-6600-47c8-ad11-2855276e1356-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-4rc8x\" (UID: \"71bf5034-6600-47c8-ad11-2855276e1356\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-4rc8x" Sep 30 12:51:47 crc kubenswrapper[5002]: I0930 12:51:47.085299 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/71bf5034-6600-47c8-ad11-2855276e1356-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-4rc8x\" (UID: \"71bf5034-6600-47c8-ad11-2855276e1356\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-4rc8x" Sep 30 12:51:47 crc kubenswrapper[5002]: I0930 12:51:47.085361 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/71bf5034-6600-47c8-ad11-2855276e1356-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-4rc8x\" (UID: \"71bf5034-6600-47c8-ad11-2855276e1356\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-4rc8x" Sep 30 12:51:47 crc kubenswrapper[5002]: I0930 12:51:47.086411 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/71bf5034-6600-47c8-ad11-2855276e1356-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-4rc8x\" (UID: \"71bf5034-6600-47c8-ad11-2855276e1356\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-4rc8x" Sep 30 12:51:47 crc kubenswrapper[5002]: I0930 12:51:47.089231 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/71bf5034-6600-47c8-ad11-2855276e1356-ssh-key\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-4rc8x\" (UID: \"71bf5034-6600-47c8-ad11-2855276e1356\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-4rc8x" Sep 30 12:51:47 crc kubenswrapper[5002]: I0930 12:51:47.089498 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/71bf5034-6600-47c8-ad11-2855276e1356-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-4rc8x\" (UID: \"71bf5034-6600-47c8-ad11-2855276e1356\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-4rc8x" Sep 30 12:51:47 crc kubenswrapper[5002]: I0930 12:51:47.103777 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jl6sh\" (UniqueName: \"kubernetes.io/projected/71bf5034-6600-47c8-ad11-2855276e1356-kube-api-access-jl6sh\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-4rc8x\" (UID: 
\"71bf5034-6600-47c8-ad11-2855276e1356\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-4rc8x" Sep 30 12:51:47 crc kubenswrapper[5002]: I0930 12:51:47.104531 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/71bf5034-6600-47c8-ad11-2855276e1356-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-4rc8x\" (UID: \"71bf5034-6600-47c8-ad11-2855276e1356\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-4rc8x" Sep 30 12:51:47 crc kubenswrapper[5002]: I0930 12:51:47.151982 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-4rc8x" Sep 30 12:51:47 crc kubenswrapper[5002]: I0930 12:51:47.685052 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-4rc8x"] Sep 30 12:51:47 crc kubenswrapper[5002]: W0930 12:51:47.693770 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod71bf5034_6600_47c8_ad11_2855276e1356.slice/crio-8ed4e63057701341c9be178070876219354f8db54204a1562c6423cbd7f8e9e8 WatchSource:0}: Error finding container 8ed4e63057701341c9be178070876219354f8db54204a1562c6423cbd7f8e9e8: Status 404 returned error can't find the container with id 8ed4e63057701341c9be178070876219354f8db54204a1562c6423cbd7f8e9e8 Sep 30 12:51:47 crc kubenswrapper[5002]: I0930 12:51:47.695912 5002 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Sep 30 12:51:47 crc kubenswrapper[5002]: I0930 12:51:47.738101 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-4rc8x" event={"ID":"71bf5034-6600-47c8-ad11-2855276e1356","Type":"ContainerStarted","Data":"8ed4e63057701341c9be178070876219354f8db54204a1562c6423cbd7f8e9e8"} Sep 30 12:51:48 crc kubenswrapper[5002]: I0930 12:51:48.747802 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-4rc8x" event={"ID":"71bf5034-6600-47c8-ad11-2855276e1356","Type":"ContainerStarted","Data":"7e4a1de20a2b561e03bed536e326a8c60ddf3d2c794c813387889d41d974e63e"} Sep 30 12:51:48 crc kubenswrapper[5002]: I0930 12:51:48.772323 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-4rc8x" podStartSLOduration=2.306614805 podStartE2EDuration="2.772302538s" podCreationTimestamp="2025-09-30 12:51:46 +0000 UTC" firstStartedPulling="2025-09-30 12:51:47.695627382 +0000 UTC m=+1881.945309528" lastFinishedPulling="2025-09-30 12:51:48.161315115 +0000 UTC m=+1882.410997261" observedRunningTime="2025-09-30 12:51:48.760988416 +0000 UTC m=+1883.010670572" watchObservedRunningTime="2025-09-30 12:51:48.772302538 +0000 UTC m=+1883.021984684" Sep 30 12:52:32 crc kubenswrapper[5002]: I0930 12:52:32.098799 5002 patch_prober.go:28] interesting pod/machine-config-daemon-ncbb5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 12:52:32 crc kubenswrapper[5002]: I0930 12:52:32.099247 5002 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" podUID="341a55c6-78d3-4fa2-8f47-b56fd41fa1c1" containerName="machine-config-daemon" probeResult="failure" 
output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 12:52:48 crc kubenswrapper[5002]: I0930 12:52:48.269019 5002 generic.go:334] "Generic (PLEG): container finished" podID="71bf5034-6600-47c8-ad11-2855276e1356" containerID="7e4a1de20a2b561e03bed536e326a8c60ddf3d2c794c813387889d41d974e63e" exitCode=0 Sep 30 12:52:48 crc kubenswrapper[5002]: I0930 12:52:48.269163 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-4rc8x" event={"ID":"71bf5034-6600-47c8-ad11-2855276e1356","Type":"ContainerDied","Data":"7e4a1de20a2b561e03bed536e326a8c60ddf3d2c794c813387889d41d974e63e"} Sep 30 12:52:49 crc kubenswrapper[5002]: I0930 12:52:49.664946 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-4rc8x" Sep 30 12:52:49 crc kubenswrapper[5002]: I0930 12:52:49.843613 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/71bf5034-6600-47c8-ad11-2855276e1356-ovncontroller-config-0\") pod \"71bf5034-6600-47c8-ad11-2855276e1356\" (UID: \"71bf5034-6600-47c8-ad11-2855276e1356\") " Sep 30 12:52:49 crc kubenswrapper[5002]: I0930 12:52:49.843901 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/71bf5034-6600-47c8-ad11-2855276e1356-ovn-combined-ca-bundle\") pod \"71bf5034-6600-47c8-ad11-2855276e1356\" (UID: \"71bf5034-6600-47c8-ad11-2855276e1356\") " Sep 30 12:52:49 crc kubenswrapper[5002]: I0930 12:52:49.844012 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/71bf5034-6600-47c8-ad11-2855276e1356-inventory\") pod \"71bf5034-6600-47c8-ad11-2855276e1356\" (UID: \"71bf5034-6600-47c8-ad11-2855276e1356\") " Sep 30 12:52:49 crc kubenswrapper[5002]: I0930 12:52:49.844183 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/71bf5034-6600-47c8-ad11-2855276e1356-ssh-key\") pod \"71bf5034-6600-47c8-ad11-2855276e1356\" (UID: \"71bf5034-6600-47c8-ad11-2855276e1356\") " Sep 30 12:52:49 crc kubenswrapper[5002]: I0930 12:52:49.844394 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jl6sh\" (UniqueName: \"kubernetes.io/projected/71bf5034-6600-47c8-ad11-2855276e1356-kube-api-access-jl6sh\") pod \"71bf5034-6600-47c8-ad11-2855276e1356\" (UID: \"71bf5034-6600-47c8-ad11-2855276e1356\") " Sep 30 12:52:49 crc kubenswrapper[5002]: I0930 12:52:49.849509 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/71bf5034-6600-47c8-ad11-2855276e1356-kube-api-access-jl6sh" (OuterVolumeSpecName: "kube-api-access-jl6sh") pod "71bf5034-6600-47c8-ad11-2855276e1356" (UID: "71bf5034-6600-47c8-ad11-2855276e1356"). InnerVolumeSpecName "kube-api-access-jl6sh". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 12:52:49 crc kubenswrapper[5002]: I0930 12:52:49.850034 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/71bf5034-6600-47c8-ad11-2855276e1356-ovn-combined-ca-bundle" (OuterVolumeSpecName: "ovn-combined-ca-bundle") pod "71bf5034-6600-47c8-ad11-2855276e1356" (UID: "71bf5034-6600-47c8-ad11-2855276e1356"). InnerVolumeSpecName "ovn-combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:52:49 crc kubenswrapper[5002]: I0930 12:52:49.869327 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/71bf5034-6600-47c8-ad11-2855276e1356-ovncontroller-config-0" (OuterVolumeSpecName: "ovncontroller-config-0") pod "71bf5034-6600-47c8-ad11-2855276e1356" (UID: "71bf5034-6600-47c8-ad11-2855276e1356"). InnerVolumeSpecName "ovncontroller-config-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 12:52:49 crc kubenswrapper[5002]: I0930 12:52:49.871838 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/71bf5034-6600-47c8-ad11-2855276e1356-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "71bf5034-6600-47c8-ad11-2855276e1356" (UID: "71bf5034-6600-47c8-ad11-2855276e1356"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:52:49 crc kubenswrapper[5002]: I0930 12:52:49.873536 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/71bf5034-6600-47c8-ad11-2855276e1356-inventory" (OuterVolumeSpecName: "inventory") pod "71bf5034-6600-47c8-ad11-2855276e1356" (UID: "71bf5034-6600-47c8-ad11-2855276e1356"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:52:49 crc kubenswrapper[5002]: I0930 12:52:49.946498 5002 reconciler_common.go:293] "Volume detached for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/71bf5034-6600-47c8-ad11-2855276e1356-ovncontroller-config-0\") on node \"crc\" DevicePath \"\"" Sep 30 12:52:49 crc kubenswrapper[5002]: I0930 12:52:49.946739 5002 reconciler_common.go:293] "Volume detached for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/71bf5034-6600-47c8-ad11-2855276e1356-ovn-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 12:52:49 crc kubenswrapper[5002]: I0930 12:52:49.946823 5002 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/71bf5034-6600-47c8-ad11-2855276e1356-inventory\") on node \"crc\" DevicePath \"\"" Sep 30 12:52:49 crc kubenswrapper[5002]: I0930 12:52:49.946896 5002 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/71bf5034-6600-47c8-ad11-2855276e1356-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 30 12:52:49 crc kubenswrapper[5002]: I0930 12:52:49.946969 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jl6sh\" (UniqueName: \"kubernetes.io/projected/71bf5034-6600-47c8-ad11-2855276e1356-kube-api-access-jl6sh\") on node \"crc\" DevicePath \"\"" Sep 30 12:52:50 crc kubenswrapper[5002]: I0930 12:52:50.288251 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-4rc8x" event={"ID":"71bf5034-6600-47c8-ad11-2855276e1356","Type":"ContainerDied","Data":"8ed4e63057701341c9be178070876219354f8db54204a1562c6423cbd7f8e9e8"} Sep 30 12:52:50 crc kubenswrapper[5002]: I0930 12:52:50.288704 5002 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8ed4e63057701341c9be178070876219354f8db54204a1562c6423cbd7f8e9e8" Sep 30 12:52:50 crc kubenswrapper[5002]: I0930 12:52:50.288397 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-4rc8x" Sep 30 12:52:50 crc kubenswrapper[5002]: I0930 12:52:50.377967 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-tzptr"] Sep 30 12:52:50 crc kubenswrapper[5002]: E0930 12:52:50.378358 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="71bf5034-6600-47c8-ad11-2855276e1356" containerName="ovn-edpm-deployment-openstack-edpm-ipam" Sep 30 12:52:50 crc kubenswrapper[5002]: I0930 12:52:50.378384 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="71bf5034-6600-47c8-ad11-2855276e1356" containerName="ovn-edpm-deployment-openstack-edpm-ipam" Sep 30 12:52:50 crc kubenswrapper[5002]: I0930 12:52:50.378668 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="71bf5034-6600-47c8-ad11-2855276e1356" containerName="ovn-edpm-deployment-openstack-edpm-ipam" Sep 30 12:52:50 crc kubenswrapper[5002]: I0930 12:52:50.379375 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-tzptr" Sep 30 12:52:50 crc kubenswrapper[5002]: I0930 12:52:50.381405 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Sep 30 12:52:50 crc kubenswrapper[5002]: I0930 12:52:50.383104 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-neutron-config" Sep 30 12:52:50 crc kubenswrapper[5002]: I0930 12:52:50.383207 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 30 12:52:50 crc kubenswrapper[5002]: I0930 12:52:50.383235 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-ovn-metadata-agent-neutron-config" Sep 30 12:52:50 crc kubenswrapper[5002]: I0930 12:52:50.383444 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-ddhrk" Sep 30 12:52:50 crc kubenswrapper[5002]: I0930 12:52:50.383497 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Sep 30 12:52:50 crc kubenswrapper[5002]: I0930 12:52:50.398502 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-tzptr"] Sep 30 12:52:50 crc kubenswrapper[5002]: I0930 12:52:50.558311 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/afe17fe9-74c6-442a-a2c3-70958d7a706b-ssh-key\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-tzptr\" (UID: \"afe17fe9-74c6-442a-a2c3-70958d7a706b\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-tzptr" Sep 30 12:52:50 crc kubenswrapper[5002]: I0930 12:52:50.558518 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/afe17fe9-74c6-442a-a2c3-70958d7a706b-nova-metadata-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-tzptr\" (UID: \"afe17fe9-74c6-442a-a2c3-70958d7a706b\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-tzptr" Sep 30 12:52:50 crc kubenswrapper[5002]: I0930 12:52:50.558891 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/afe17fe9-74c6-442a-a2c3-70958d7a706b-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-tzptr\" (UID: \"afe17fe9-74c6-442a-a2c3-70958d7a706b\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-tzptr" Sep 30 12:52:50 crc kubenswrapper[5002]: I0930 12:52:50.558991 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/afe17fe9-74c6-442a-a2c3-70958d7a706b-inventory\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-tzptr\" (UID: \"afe17fe9-74c6-442a-a2c3-70958d7a706b\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-tzptr" Sep 30 12:52:50 crc kubenswrapper[5002]: I0930 12:52:50.559119 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/afe17fe9-74c6-442a-a2c3-70958d7a706b-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-tzptr\" (UID: \"afe17fe9-74c6-442a-a2c3-70958d7a706b\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-tzptr" Sep 30 12:52:50 crc kubenswrapper[5002]: I0930 12:52:50.559155 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wzj95\" (UniqueName: \"kubernetes.io/projected/afe17fe9-74c6-442a-a2c3-70958d7a706b-kube-api-access-wzj95\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-tzptr\" (UID: \"afe17fe9-74c6-442a-a2c3-70958d7a706b\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-tzptr" Sep 30 12:52:50 crc kubenswrapper[5002]: I0930 12:52:50.660800 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/afe17fe9-74c6-442a-a2c3-70958d7a706b-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-tzptr\" (UID: \"afe17fe9-74c6-442a-a2c3-70958d7a706b\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-tzptr" Sep 30 12:52:50 crc kubenswrapper[5002]: I0930 12:52:50.661534 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wzj95\" (UniqueName: \"kubernetes.io/projected/afe17fe9-74c6-442a-a2c3-70958d7a706b-kube-api-access-wzj95\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-tzptr\" (UID: \"afe17fe9-74c6-442a-a2c3-70958d7a706b\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-tzptr" Sep 30 12:52:50 crc kubenswrapper[5002]: I0930 12:52:50.661672 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/afe17fe9-74c6-442a-a2c3-70958d7a706b-ssh-key\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-tzptr\" (UID: \"afe17fe9-74c6-442a-a2c3-70958d7a706b\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-tzptr" Sep 30 12:52:50 crc kubenswrapper[5002]: I0930 12:52:50.661769 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/afe17fe9-74c6-442a-a2c3-70958d7a706b-nova-metadata-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-tzptr\" (UID: 
\"afe17fe9-74c6-442a-a2c3-70958d7a706b\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-tzptr" Sep 30 12:52:50 crc kubenswrapper[5002]: I0930 12:52:50.661956 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/afe17fe9-74c6-442a-a2c3-70958d7a706b-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-tzptr\" (UID: \"afe17fe9-74c6-442a-a2c3-70958d7a706b\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-tzptr" Sep 30 12:52:50 crc kubenswrapper[5002]: I0930 12:52:50.662013 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/afe17fe9-74c6-442a-a2c3-70958d7a706b-inventory\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-tzptr\" (UID: \"afe17fe9-74c6-442a-a2c3-70958d7a706b\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-tzptr" Sep 30 12:52:50 crc kubenswrapper[5002]: I0930 12:52:50.666260 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/afe17fe9-74c6-442a-a2c3-70958d7a706b-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-tzptr\" (UID: \"afe17fe9-74c6-442a-a2c3-70958d7a706b\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-tzptr" Sep 30 12:52:50 crc kubenswrapper[5002]: I0930 12:52:50.668545 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/afe17fe9-74c6-442a-a2c3-70958d7a706b-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-tzptr\" (UID: \"afe17fe9-74c6-442a-a2c3-70958d7a706b\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-tzptr" Sep 30 12:52:50 crc kubenswrapper[5002]: I0930 12:52:50.668608 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/afe17fe9-74c6-442a-a2c3-70958d7a706b-inventory\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-tzptr\" (UID: \"afe17fe9-74c6-442a-a2c3-70958d7a706b\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-tzptr" Sep 30 12:52:50 crc kubenswrapper[5002]: I0930 12:52:50.669015 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/afe17fe9-74c6-442a-a2c3-70958d7a706b-nova-metadata-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-tzptr\" (UID: \"afe17fe9-74c6-442a-a2c3-70958d7a706b\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-tzptr" Sep 30 12:52:50 crc kubenswrapper[5002]: I0930 12:52:50.670687 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/afe17fe9-74c6-442a-a2c3-70958d7a706b-ssh-key\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-tzptr\" (UID: \"afe17fe9-74c6-442a-a2c3-70958d7a706b\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-tzptr" Sep 30 12:52:50 crc kubenswrapper[5002]: I0930 12:52:50.691398 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wzj95\" (UniqueName: 
\"kubernetes.io/projected/afe17fe9-74c6-442a-a2c3-70958d7a706b-kube-api-access-wzj95\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-tzptr\" (UID: \"afe17fe9-74c6-442a-a2c3-70958d7a706b\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-tzptr" Sep 30 12:52:50 crc kubenswrapper[5002]: I0930 12:52:50.697232 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-tzptr" Sep 30 12:52:51 crc kubenswrapper[5002]: I0930 12:52:51.190481 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-tzptr"] Sep 30 12:52:51 crc kubenswrapper[5002]: I0930 12:52:51.303598 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-tzptr" event={"ID":"afe17fe9-74c6-442a-a2c3-70958d7a706b","Type":"ContainerStarted","Data":"d2543f7890498ceb4f55e9095da3539467798fcd17fdf7fafc355d21f1962e18"} Sep 30 12:52:52 crc kubenswrapper[5002]: I0930 12:52:52.313134 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-tzptr" event={"ID":"afe17fe9-74c6-442a-a2c3-70958d7a706b","Type":"ContainerStarted","Data":"1e250aadb4f260b9d1d2b68752233f7c25fecf4b9953338b29aa392187d8368f"} Sep 30 12:52:52 crc kubenswrapper[5002]: I0930 12:52:52.334314 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-tzptr" podStartSLOduration=1.886505211 podStartE2EDuration="2.334297293s" podCreationTimestamp="2025-09-30 12:52:50 +0000 UTC" firstStartedPulling="2025-09-30 12:52:51.195457288 +0000 UTC m=+1945.445139434" lastFinishedPulling="2025-09-30 12:52:51.64324937 +0000 UTC m=+1945.892931516" observedRunningTime="2025-09-30 12:52:52.32959831 +0000 UTC m=+1946.579280466" watchObservedRunningTime="2025-09-30 12:52:52.334297293 +0000 UTC m=+1946.583979429" Sep 30 12:53:02 crc kubenswrapper[5002]: I0930 12:53:02.098138 5002 patch_prober.go:28] interesting pod/machine-config-daemon-ncbb5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 12:53:02 crc kubenswrapper[5002]: I0930 12:53:02.098714 5002 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" podUID="341a55c6-78d3-4fa2-8f47-b56fd41fa1c1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 12:53:32 crc kubenswrapper[5002]: I0930 12:53:32.098697 5002 patch_prober.go:28] interesting pod/machine-config-daemon-ncbb5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 12:53:32 crc kubenswrapper[5002]: I0930 12:53:32.099218 5002 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" podUID="341a55c6-78d3-4fa2-8f47-b56fd41fa1c1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 12:53:32 crc 
Sep 30 12:53:32 crc kubenswrapper[5002]: I0930 12:53:32.099263 5002 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5"
Sep 30 12:53:32 crc kubenswrapper[5002]: I0930 12:53:32.099985 5002 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"83f28e70b07c52d080e9624494f8eff05394195f3007fb1fb9e6b51a22eef012"} pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Sep 30 12:53:32 crc kubenswrapper[5002]: I0930 12:53:32.100048 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" podUID="341a55c6-78d3-4fa2-8f47-b56fd41fa1c1" containerName="machine-config-daemon" containerID="cri-o://83f28e70b07c52d080e9624494f8eff05394195f3007fb1fb9e6b51a22eef012" gracePeriod=600
Sep 30 12:53:32 crc kubenswrapper[5002]: I0930 12:53:32.684607 5002 generic.go:334] "Generic (PLEG): container finished" podID="341a55c6-78d3-4fa2-8f47-b56fd41fa1c1" containerID="83f28e70b07c52d080e9624494f8eff05394195f3007fb1fb9e6b51a22eef012" exitCode=0
Sep 30 12:53:32 crc kubenswrapper[5002]: I0930 12:53:32.686501 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" event={"ID":"341a55c6-78d3-4fa2-8f47-b56fd41fa1c1","Type":"ContainerDied","Data":"83f28e70b07c52d080e9624494f8eff05394195f3007fb1fb9e6b51a22eef012"}
Sep 30 12:53:32 crc kubenswrapper[5002]: I0930 12:53:32.686771 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" event={"ID":"341a55c6-78d3-4fa2-8f47-b56fd41fa1c1","Type":"ContainerStarted","Data":"c399d44e9bfc45838bcb7d385b7879008ca1a51ef33744b6ce2235833eb197e0"}
Sep 30 12:53:32 crc kubenswrapper[5002]: I0930 12:53:32.686790 5002 scope.go:117] "RemoveContainer" containerID="a913a5b698811d1cde360ed1593c0fd1d395638886860c838d99d6b923c5d677"
Sep 30 12:53:37 crc kubenswrapper[5002]: I0930 12:53:37.733335 5002 generic.go:334] "Generic (PLEG): container finished" podID="afe17fe9-74c6-442a-a2c3-70958d7a706b" containerID="1e250aadb4f260b9d1d2b68752233f7c25fecf4b9953338b29aa392187d8368f" exitCode=0
Sep 30 12:53:37 crc kubenswrapper[5002]: I0930 12:53:37.733420 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-tzptr" event={"ID":"afe17fe9-74c6-442a-a2c3-70958d7a706b","Type":"ContainerDied","Data":"1e250aadb4f260b9d1d2b68752233f7c25fecf4b9953338b29aa392187d8368f"}
Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-tzptr" Sep 30 12:53:39 crc kubenswrapper[5002]: I0930 12:53:39.267659 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/afe17fe9-74c6-442a-a2c3-70958d7a706b-ssh-key\") pod \"afe17fe9-74c6-442a-a2c3-70958d7a706b\" (UID: \"afe17fe9-74c6-442a-a2c3-70958d7a706b\") " Sep 30 12:53:39 crc kubenswrapper[5002]: I0930 12:53:39.267816 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/afe17fe9-74c6-442a-a2c3-70958d7a706b-neutron-ovn-metadata-agent-neutron-config-0\") pod \"afe17fe9-74c6-442a-a2c3-70958d7a706b\" (UID: \"afe17fe9-74c6-442a-a2c3-70958d7a706b\") " Sep 30 12:53:39 crc kubenswrapper[5002]: I0930 12:53:39.267924 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wzj95\" (UniqueName: \"kubernetes.io/projected/afe17fe9-74c6-442a-a2c3-70958d7a706b-kube-api-access-wzj95\") pod \"afe17fe9-74c6-442a-a2c3-70958d7a706b\" (UID: \"afe17fe9-74c6-442a-a2c3-70958d7a706b\") " Sep 30 12:53:39 crc kubenswrapper[5002]: I0930 12:53:39.267990 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/afe17fe9-74c6-442a-a2c3-70958d7a706b-inventory\") pod \"afe17fe9-74c6-442a-a2c3-70958d7a706b\" (UID: \"afe17fe9-74c6-442a-a2c3-70958d7a706b\") " Sep 30 12:53:39 crc kubenswrapper[5002]: I0930 12:53:39.268023 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/afe17fe9-74c6-442a-a2c3-70958d7a706b-neutron-metadata-combined-ca-bundle\") pod \"afe17fe9-74c6-442a-a2c3-70958d7a706b\" (UID: \"afe17fe9-74c6-442a-a2c3-70958d7a706b\") " Sep 30 12:53:39 crc kubenswrapper[5002]: I0930 12:53:39.268085 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/afe17fe9-74c6-442a-a2c3-70958d7a706b-nova-metadata-neutron-config-0\") pod \"afe17fe9-74c6-442a-a2c3-70958d7a706b\" (UID: \"afe17fe9-74c6-442a-a2c3-70958d7a706b\") " Sep 30 12:53:39 crc kubenswrapper[5002]: I0930 12:53:39.274224 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/afe17fe9-74c6-442a-a2c3-70958d7a706b-neutron-metadata-combined-ca-bundle" (OuterVolumeSpecName: "neutron-metadata-combined-ca-bundle") pod "afe17fe9-74c6-442a-a2c3-70958d7a706b" (UID: "afe17fe9-74c6-442a-a2c3-70958d7a706b"). InnerVolumeSpecName "neutron-metadata-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:53:39 crc kubenswrapper[5002]: I0930 12:53:39.274853 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/afe17fe9-74c6-442a-a2c3-70958d7a706b-kube-api-access-wzj95" (OuterVolumeSpecName: "kube-api-access-wzj95") pod "afe17fe9-74c6-442a-a2c3-70958d7a706b" (UID: "afe17fe9-74c6-442a-a2c3-70958d7a706b"). InnerVolumeSpecName "kube-api-access-wzj95". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 12:53:39 crc kubenswrapper[5002]: I0930 12:53:39.297905 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/afe17fe9-74c6-442a-a2c3-70958d7a706b-neutron-ovn-metadata-agent-neutron-config-0" (OuterVolumeSpecName: "neutron-ovn-metadata-agent-neutron-config-0") pod "afe17fe9-74c6-442a-a2c3-70958d7a706b" (UID: "afe17fe9-74c6-442a-a2c3-70958d7a706b"). InnerVolumeSpecName "neutron-ovn-metadata-agent-neutron-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:53:39 crc kubenswrapper[5002]: I0930 12:53:39.298275 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/afe17fe9-74c6-442a-a2c3-70958d7a706b-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "afe17fe9-74c6-442a-a2c3-70958d7a706b" (UID: "afe17fe9-74c6-442a-a2c3-70958d7a706b"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:53:39 crc kubenswrapper[5002]: I0930 12:53:39.300207 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/afe17fe9-74c6-442a-a2c3-70958d7a706b-nova-metadata-neutron-config-0" (OuterVolumeSpecName: "nova-metadata-neutron-config-0") pod "afe17fe9-74c6-442a-a2c3-70958d7a706b" (UID: "afe17fe9-74c6-442a-a2c3-70958d7a706b"). InnerVolumeSpecName "nova-metadata-neutron-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:53:39 crc kubenswrapper[5002]: I0930 12:53:39.304645 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/afe17fe9-74c6-442a-a2c3-70958d7a706b-inventory" (OuterVolumeSpecName: "inventory") pod "afe17fe9-74c6-442a-a2c3-70958d7a706b" (UID: "afe17fe9-74c6-442a-a2c3-70958d7a706b"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:53:39 crc kubenswrapper[5002]: I0930 12:53:39.370833 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wzj95\" (UniqueName: \"kubernetes.io/projected/afe17fe9-74c6-442a-a2c3-70958d7a706b-kube-api-access-wzj95\") on node \"crc\" DevicePath \"\"" Sep 30 12:53:39 crc kubenswrapper[5002]: I0930 12:53:39.370869 5002 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/afe17fe9-74c6-442a-a2c3-70958d7a706b-inventory\") on node \"crc\" DevicePath \"\"" Sep 30 12:53:39 crc kubenswrapper[5002]: I0930 12:53:39.370883 5002 reconciler_common.go:293] "Volume detached for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/afe17fe9-74c6-442a-a2c3-70958d7a706b-neutron-metadata-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 12:53:39 crc kubenswrapper[5002]: I0930 12:53:39.370895 5002 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/afe17fe9-74c6-442a-a2c3-70958d7a706b-nova-metadata-neutron-config-0\") on node \"crc\" DevicePath \"\"" Sep 30 12:53:39 crc kubenswrapper[5002]: I0930 12:53:39.370908 5002 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/afe17fe9-74c6-442a-a2c3-70958d7a706b-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 30 12:53:39 crc kubenswrapper[5002]: I0930 12:53:39.370919 5002 reconciler_common.go:293] "Volume detached for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/afe17fe9-74c6-442a-a2c3-70958d7a706b-neutron-ovn-metadata-agent-neutron-config-0\") on node \"crc\" DevicePath \"\"" Sep 30 12:53:39 crc kubenswrapper[5002]: I0930 12:53:39.751779 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-tzptr" event={"ID":"afe17fe9-74c6-442a-a2c3-70958d7a706b","Type":"ContainerDied","Data":"d2543f7890498ceb4f55e9095da3539467798fcd17fdf7fafc355d21f1962e18"} Sep 30 12:53:39 crc kubenswrapper[5002]: I0930 12:53:39.752166 5002 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d2543f7890498ceb4f55e9095da3539467798fcd17fdf7fafc355d21f1962e18" Sep 30 12:53:39 crc kubenswrapper[5002]: I0930 12:53:39.751821 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-tzptr" Sep 30 12:53:39 crc kubenswrapper[5002]: I0930 12:53:39.873025 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/libvirt-edpm-deployment-openstack-edpm-ipam-lzd9v"] Sep 30 12:53:39 crc kubenswrapper[5002]: E0930 12:53:39.874021 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="afe17fe9-74c6-442a-a2c3-70958d7a706b" containerName="neutron-metadata-edpm-deployment-openstack-edpm-ipam" Sep 30 12:53:39 crc kubenswrapper[5002]: I0930 12:53:39.874050 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="afe17fe9-74c6-442a-a2c3-70958d7a706b" containerName="neutron-metadata-edpm-deployment-openstack-edpm-ipam" Sep 30 12:53:39 crc kubenswrapper[5002]: I0930 12:53:39.874333 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="afe17fe9-74c6-442a-a2c3-70958d7a706b" containerName="neutron-metadata-edpm-deployment-openstack-edpm-ipam" Sep 30 12:53:39 crc kubenswrapper[5002]: I0930 12:53:39.875188 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-lzd9v" Sep 30 12:53:39 crc kubenswrapper[5002]: I0930 12:53:39.879080 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"libvirt-secret" Sep 30 12:53:39 crc kubenswrapper[5002]: I0930 12:53:39.879140 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 30 12:53:39 crc kubenswrapper[5002]: I0930 12:53:39.879080 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Sep 30 12:53:39 crc kubenswrapper[5002]: I0930 12:53:39.879768 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-ddhrk" Sep 30 12:53:39 crc kubenswrapper[5002]: I0930 12:53:39.879954 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Sep 30 12:53:39 crc kubenswrapper[5002]: I0930 12:53:39.880568 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bq9h2\" (UniqueName: \"kubernetes.io/projected/7a8b7e27-6872-47a1-b564-9a288ac7cef0-kube-api-access-bq9h2\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-lzd9v\" (UID: \"7a8b7e27-6872-47a1-b564-9a288ac7cef0\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-lzd9v" Sep 30 12:53:39 crc kubenswrapper[5002]: I0930 12:53:39.880649 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/7a8b7e27-6872-47a1-b564-9a288ac7cef0-inventory\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-lzd9v\" (UID: \"7a8b7e27-6872-47a1-b564-9a288ac7cef0\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-lzd9v" Sep 30 12:53:39 crc kubenswrapper[5002]: I0930 12:53:39.881061 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7a8b7e27-6872-47a1-b564-9a288ac7cef0-libvirt-combined-ca-bundle\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-lzd9v\" (UID: \"7a8b7e27-6872-47a1-b564-9a288ac7cef0\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-lzd9v" Sep 30 12:53:39 crc kubenswrapper[5002]: I0930 12:53:39.881262 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/7a8b7e27-6872-47a1-b564-9a288ac7cef0-ssh-key\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-lzd9v\" (UID: \"7a8b7e27-6872-47a1-b564-9a288ac7cef0\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-lzd9v" Sep 30 12:53:39 crc kubenswrapper[5002]: I0930 12:53:39.881313 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/7a8b7e27-6872-47a1-b564-9a288ac7cef0-libvirt-secret-0\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-lzd9v\" (UID: \"7a8b7e27-6872-47a1-b564-9a288ac7cef0\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-lzd9v" Sep 30 12:53:39 crc kubenswrapper[5002]: I0930 12:53:39.887701 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/libvirt-edpm-deployment-openstack-edpm-ipam-lzd9v"] Sep 30 12:53:39 crc kubenswrapper[5002]: I0930 12:53:39.983559 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"kube-api-access-bq9h2\" (UniqueName: \"kubernetes.io/projected/7a8b7e27-6872-47a1-b564-9a288ac7cef0-kube-api-access-bq9h2\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-lzd9v\" (UID: \"7a8b7e27-6872-47a1-b564-9a288ac7cef0\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-lzd9v" Sep 30 12:53:39 crc kubenswrapper[5002]: I0930 12:53:39.983620 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/7a8b7e27-6872-47a1-b564-9a288ac7cef0-inventory\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-lzd9v\" (UID: \"7a8b7e27-6872-47a1-b564-9a288ac7cef0\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-lzd9v" Sep 30 12:53:39 crc kubenswrapper[5002]: I0930 12:53:39.983733 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7a8b7e27-6872-47a1-b564-9a288ac7cef0-libvirt-combined-ca-bundle\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-lzd9v\" (UID: \"7a8b7e27-6872-47a1-b564-9a288ac7cef0\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-lzd9v" Sep 30 12:53:39 crc kubenswrapper[5002]: I0930 12:53:39.983802 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/7a8b7e27-6872-47a1-b564-9a288ac7cef0-ssh-key\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-lzd9v\" (UID: \"7a8b7e27-6872-47a1-b564-9a288ac7cef0\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-lzd9v" Sep 30 12:53:39 crc kubenswrapper[5002]: I0930 12:53:39.983823 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/7a8b7e27-6872-47a1-b564-9a288ac7cef0-libvirt-secret-0\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-lzd9v\" (UID: \"7a8b7e27-6872-47a1-b564-9a288ac7cef0\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-lzd9v" Sep 30 12:53:39 crc kubenswrapper[5002]: I0930 12:53:39.988318 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/7a8b7e27-6872-47a1-b564-9a288ac7cef0-ssh-key\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-lzd9v\" (UID: \"7a8b7e27-6872-47a1-b564-9a288ac7cef0\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-lzd9v" Sep 30 12:53:39 crc kubenswrapper[5002]: I0930 12:53:39.988394 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/7a8b7e27-6872-47a1-b564-9a288ac7cef0-libvirt-secret-0\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-lzd9v\" (UID: \"7a8b7e27-6872-47a1-b564-9a288ac7cef0\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-lzd9v" Sep 30 12:53:39 crc kubenswrapper[5002]: I0930 12:53:39.988966 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7a8b7e27-6872-47a1-b564-9a288ac7cef0-libvirt-combined-ca-bundle\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-lzd9v\" (UID: \"7a8b7e27-6872-47a1-b564-9a288ac7cef0\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-lzd9v" Sep 30 12:53:39 crc kubenswrapper[5002]: I0930 12:53:39.989422 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/7a8b7e27-6872-47a1-b564-9a288ac7cef0-inventory\") pod 
\"libvirt-edpm-deployment-openstack-edpm-ipam-lzd9v\" (UID: \"7a8b7e27-6872-47a1-b564-9a288ac7cef0\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-lzd9v" Sep 30 12:53:40 crc kubenswrapper[5002]: I0930 12:53:40.001020 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bq9h2\" (UniqueName: \"kubernetes.io/projected/7a8b7e27-6872-47a1-b564-9a288ac7cef0-kube-api-access-bq9h2\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-lzd9v\" (UID: \"7a8b7e27-6872-47a1-b564-9a288ac7cef0\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-lzd9v" Sep 30 12:53:40 crc kubenswrapper[5002]: I0930 12:53:40.192763 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-lzd9v" Sep 30 12:53:40 crc kubenswrapper[5002]: I0930 12:53:40.840948 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/libvirt-edpm-deployment-openstack-edpm-ipam-lzd9v"] Sep 30 12:53:41 crc kubenswrapper[5002]: I0930 12:53:41.771458 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-lzd9v" event={"ID":"7a8b7e27-6872-47a1-b564-9a288ac7cef0","Type":"ContainerStarted","Data":"adf2f5831d8bbde553ae2303e2087a2c3759e3f7bd16e37e526b2fd6e9bf1cd2"} Sep 30 12:53:41 crc kubenswrapper[5002]: I0930 12:53:41.773100 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-lzd9v" event={"ID":"7a8b7e27-6872-47a1-b564-9a288ac7cef0","Type":"ContainerStarted","Data":"5e61b9701b62113abdf5e5a9274e4be88181a90bac722a6782b9eba330071be9"} Sep 30 12:53:41 crc kubenswrapper[5002]: I0930 12:53:41.799791 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-lzd9v" podStartSLOduration=2.38203754 podStartE2EDuration="2.799765593s" podCreationTimestamp="2025-09-30 12:53:39 +0000 UTC" firstStartedPulling="2025-09-30 12:53:40.845032591 +0000 UTC m=+1995.094714737" lastFinishedPulling="2025-09-30 12:53:41.262760644 +0000 UTC m=+1995.512442790" observedRunningTime="2025-09-30 12:53:41.787501526 +0000 UTC m=+1996.037183692" watchObservedRunningTime="2025-09-30 12:53:41.799765593 +0000 UTC m=+1996.049447759" Sep 30 12:54:00 crc kubenswrapper[5002]: I0930 12:54:00.911871 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-tj78p"] Sep 30 12:54:00 crc kubenswrapper[5002]: I0930 12:54:00.915587 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-tj78p" Sep 30 12:54:00 crc kubenswrapper[5002]: I0930 12:54:00.926141 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-tj78p"] Sep 30 12:54:00 crc kubenswrapper[5002]: I0930 12:54:00.988201 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e5942273-91c1-4cda-9619-63cee7b422b0-utilities\") pod \"redhat-operators-tj78p\" (UID: \"e5942273-91c1-4cda-9619-63cee7b422b0\") " pod="openshift-marketplace/redhat-operators-tj78p" Sep 30 12:54:00 crc kubenswrapper[5002]: I0930 12:54:00.988389 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e5942273-91c1-4cda-9619-63cee7b422b0-catalog-content\") pod \"redhat-operators-tj78p\" (UID: \"e5942273-91c1-4cda-9619-63cee7b422b0\") " pod="openshift-marketplace/redhat-operators-tj78p" Sep 30 12:54:00 crc kubenswrapper[5002]: I0930 12:54:00.988463 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2hw28\" (UniqueName: \"kubernetes.io/projected/e5942273-91c1-4cda-9619-63cee7b422b0-kube-api-access-2hw28\") pod \"redhat-operators-tj78p\" (UID: \"e5942273-91c1-4cda-9619-63cee7b422b0\") " pod="openshift-marketplace/redhat-operators-tj78p" Sep 30 12:54:01 crc kubenswrapper[5002]: I0930 12:54:01.090814 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e5942273-91c1-4cda-9619-63cee7b422b0-catalog-content\") pod \"redhat-operators-tj78p\" (UID: \"e5942273-91c1-4cda-9619-63cee7b422b0\") " pod="openshift-marketplace/redhat-operators-tj78p" Sep 30 12:54:01 crc kubenswrapper[5002]: I0930 12:54:01.090899 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2hw28\" (UniqueName: \"kubernetes.io/projected/e5942273-91c1-4cda-9619-63cee7b422b0-kube-api-access-2hw28\") pod \"redhat-operators-tj78p\" (UID: \"e5942273-91c1-4cda-9619-63cee7b422b0\") " pod="openshift-marketplace/redhat-operators-tj78p" Sep 30 12:54:01 crc kubenswrapper[5002]: I0930 12:54:01.090962 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e5942273-91c1-4cda-9619-63cee7b422b0-utilities\") pod \"redhat-operators-tj78p\" (UID: \"e5942273-91c1-4cda-9619-63cee7b422b0\") " pod="openshift-marketplace/redhat-operators-tj78p" Sep 30 12:54:01 crc kubenswrapper[5002]: I0930 12:54:01.091536 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e5942273-91c1-4cda-9619-63cee7b422b0-utilities\") pod \"redhat-operators-tj78p\" (UID: \"e5942273-91c1-4cda-9619-63cee7b422b0\") " pod="openshift-marketplace/redhat-operators-tj78p" Sep 30 12:54:01 crc kubenswrapper[5002]: I0930 12:54:01.091535 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e5942273-91c1-4cda-9619-63cee7b422b0-catalog-content\") pod \"redhat-operators-tj78p\" (UID: \"e5942273-91c1-4cda-9619-63cee7b422b0\") " pod="openshift-marketplace/redhat-operators-tj78p" Sep 30 12:54:01 crc kubenswrapper[5002]: I0930 12:54:01.110653 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-2hw28\" (UniqueName: \"kubernetes.io/projected/e5942273-91c1-4cda-9619-63cee7b422b0-kube-api-access-2hw28\") pod \"redhat-operators-tj78p\" (UID: \"e5942273-91c1-4cda-9619-63cee7b422b0\") " pod="openshift-marketplace/redhat-operators-tj78p" Sep 30 12:54:01 crc kubenswrapper[5002]: I0930 12:54:01.238297 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-tj78p" Sep 30 12:54:01 crc kubenswrapper[5002]: I0930 12:54:01.703739 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-tj78p"] Sep 30 12:54:01 crc kubenswrapper[5002]: I0930 12:54:01.951284 5002 generic.go:334] "Generic (PLEG): container finished" podID="e5942273-91c1-4cda-9619-63cee7b422b0" containerID="2a381531ab2adeb46b3ddb41588ee92891159bda174867fd98e4909e6440f193" exitCode=0 Sep 30 12:54:01 crc kubenswrapper[5002]: I0930 12:54:01.951738 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tj78p" event={"ID":"e5942273-91c1-4cda-9619-63cee7b422b0","Type":"ContainerDied","Data":"2a381531ab2adeb46b3ddb41588ee92891159bda174867fd98e4909e6440f193"} Sep 30 12:54:01 crc kubenswrapper[5002]: I0930 12:54:01.951766 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tj78p" event={"ID":"e5942273-91c1-4cda-9619-63cee7b422b0","Type":"ContainerStarted","Data":"8a52f56245ffdb6df98ebc37d6aefa4466b890e2a7579eca37ab3b5bcea6c175"} Sep 30 12:54:02 crc kubenswrapper[5002]: I0930 12:54:02.961177 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tj78p" event={"ID":"e5942273-91c1-4cda-9619-63cee7b422b0","Type":"ContainerStarted","Data":"be058d1bafe9bff579d3c2312d758b5ed94da75c849a94f4b318a8c83ee91a5f"} Sep 30 12:54:05 crc kubenswrapper[5002]: I0930 12:54:05.996164 5002 generic.go:334] "Generic (PLEG): container finished" podID="e5942273-91c1-4cda-9619-63cee7b422b0" containerID="be058d1bafe9bff579d3c2312d758b5ed94da75c849a94f4b318a8c83ee91a5f" exitCode=0 Sep 30 12:54:05 crc kubenswrapper[5002]: I0930 12:54:05.996217 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tj78p" event={"ID":"e5942273-91c1-4cda-9619-63cee7b422b0","Type":"ContainerDied","Data":"be058d1bafe9bff579d3c2312d758b5ed94da75c849a94f4b318a8c83ee91a5f"} Sep 30 12:54:07 crc kubenswrapper[5002]: I0930 12:54:07.009303 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tj78p" event={"ID":"e5942273-91c1-4cda-9619-63cee7b422b0","Type":"ContainerStarted","Data":"d86f7870da0994e47ff1586f44a50a5604e1a05fd4479a9299eb9b69a64f3a39"} Sep 30 12:54:07 crc kubenswrapper[5002]: I0930 12:54:07.033286 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-tj78p" podStartSLOduration=2.5288349759999997 podStartE2EDuration="7.033269827s" podCreationTimestamp="2025-09-30 12:54:00 +0000 UTC" firstStartedPulling="2025-09-30 12:54:01.953180288 +0000 UTC m=+2016.202862424" lastFinishedPulling="2025-09-30 12:54:06.457615129 +0000 UTC m=+2020.707297275" observedRunningTime="2025-09-30 12:54:07.026254363 +0000 UTC m=+2021.275936499" watchObservedRunningTime="2025-09-30 12:54:07.033269827 +0000 UTC m=+2021.282951973" Sep 30 12:54:11 crc kubenswrapper[5002]: I0930 12:54:11.239543 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" 
pod="openshift-marketplace/redhat-operators-tj78p" Sep 30 12:54:11 crc kubenswrapper[5002]: I0930 12:54:11.240116 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-tj78p" Sep 30 12:54:11 crc kubenswrapper[5002]: I0930 12:54:11.293441 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-tj78p" Sep 30 12:54:12 crc kubenswrapper[5002]: I0930 12:54:12.105043 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-tj78p" Sep 30 12:54:12 crc kubenswrapper[5002]: I0930 12:54:12.154300 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-tj78p"] Sep 30 12:54:14 crc kubenswrapper[5002]: I0930 12:54:14.070900 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-tj78p" podUID="e5942273-91c1-4cda-9619-63cee7b422b0" containerName="registry-server" containerID="cri-o://d86f7870da0994e47ff1586f44a50a5604e1a05fd4479a9299eb9b69a64f3a39" gracePeriod=2 Sep 30 12:54:14 crc kubenswrapper[5002]: I0930 12:54:14.529074 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-tj78p" Sep 30 12:54:14 crc kubenswrapper[5002]: I0930 12:54:14.550857 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e5942273-91c1-4cda-9619-63cee7b422b0-utilities\") pod \"e5942273-91c1-4cda-9619-63cee7b422b0\" (UID: \"e5942273-91c1-4cda-9619-63cee7b422b0\") " Sep 30 12:54:14 crc kubenswrapper[5002]: I0930 12:54:14.550951 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2hw28\" (UniqueName: \"kubernetes.io/projected/e5942273-91c1-4cda-9619-63cee7b422b0-kube-api-access-2hw28\") pod \"e5942273-91c1-4cda-9619-63cee7b422b0\" (UID: \"e5942273-91c1-4cda-9619-63cee7b422b0\") " Sep 30 12:54:14 crc kubenswrapper[5002]: I0930 12:54:14.551066 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e5942273-91c1-4cda-9619-63cee7b422b0-catalog-content\") pod \"e5942273-91c1-4cda-9619-63cee7b422b0\" (UID: \"e5942273-91c1-4cda-9619-63cee7b422b0\") " Sep 30 12:54:14 crc kubenswrapper[5002]: I0930 12:54:14.551887 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e5942273-91c1-4cda-9619-63cee7b422b0-utilities" (OuterVolumeSpecName: "utilities") pod "e5942273-91c1-4cda-9619-63cee7b422b0" (UID: "e5942273-91c1-4cda-9619-63cee7b422b0"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 12:54:14 crc kubenswrapper[5002]: I0930 12:54:14.558997 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e5942273-91c1-4cda-9619-63cee7b422b0-kube-api-access-2hw28" (OuterVolumeSpecName: "kube-api-access-2hw28") pod "e5942273-91c1-4cda-9619-63cee7b422b0" (UID: "e5942273-91c1-4cda-9619-63cee7b422b0"). InnerVolumeSpecName "kube-api-access-2hw28". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 12:54:14 crc kubenswrapper[5002]: I0930 12:54:14.639511 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e5942273-91c1-4cda-9619-63cee7b422b0-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "e5942273-91c1-4cda-9619-63cee7b422b0" (UID: "e5942273-91c1-4cda-9619-63cee7b422b0"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 12:54:14 crc kubenswrapper[5002]: I0930 12:54:14.652729 5002 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e5942273-91c1-4cda-9619-63cee7b422b0-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 12:54:14 crc kubenswrapper[5002]: I0930 12:54:14.652950 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2hw28\" (UniqueName: \"kubernetes.io/projected/e5942273-91c1-4cda-9619-63cee7b422b0-kube-api-access-2hw28\") on node \"crc\" DevicePath \"\"" Sep 30 12:54:14 crc kubenswrapper[5002]: I0930 12:54:14.653014 5002 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e5942273-91c1-4cda-9619-63cee7b422b0-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 12:54:15 crc kubenswrapper[5002]: I0930 12:54:15.083432 5002 generic.go:334] "Generic (PLEG): container finished" podID="e5942273-91c1-4cda-9619-63cee7b422b0" containerID="d86f7870da0994e47ff1586f44a50a5604e1a05fd4479a9299eb9b69a64f3a39" exitCode=0 Sep 30 12:54:15 crc kubenswrapper[5002]: I0930 12:54:15.083512 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tj78p" event={"ID":"e5942273-91c1-4cda-9619-63cee7b422b0","Type":"ContainerDied","Data":"d86f7870da0994e47ff1586f44a50a5604e1a05fd4479a9299eb9b69a64f3a39"} Sep 30 12:54:15 crc kubenswrapper[5002]: I0930 12:54:15.083574 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-tj78p" Sep 30 12:54:15 crc kubenswrapper[5002]: I0930 12:54:15.083586 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tj78p" event={"ID":"e5942273-91c1-4cda-9619-63cee7b422b0","Type":"ContainerDied","Data":"8a52f56245ffdb6df98ebc37d6aefa4466b890e2a7579eca37ab3b5bcea6c175"} Sep 30 12:54:15 crc kubenswrapper[5002]: I0930 12:54:15.083615 5002 scope.go:117] "RemoveContainer" containerID="d86f7870da0994e47ff1586f44a50a5604e1a05fd4479a9299eb9b69a64f3a39" Sep 30 12:54:15 crc kubenswrapper[5002]: I0930 12:54:15.115159 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-tj78p"] Sep 30 12:54:15 crc kubenswrapper[5002]: I0930 12:54:15.116822 5002 scope.go:117] "RemoveContainer" containerID="be058d1bafe9bff579d3c2312d758b5ed94da75c849a94f4b318a8c83ee91a5f" Sep 30 12:54:15 crc kubenswrapper[5002]: I0930 12:54:15.125067 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-tj78p"] Sep 30 12:54:15 crc kubenswrapper[5002]: I0930 12:54:15.146897 5002 scope.go:117] "RemoveContainer" containerID="2a381531ab2adeb46b3ddb41588ee92891159bda174867fd98e4909e6440f193" Sep 30 12:54:15 crc kubenswrapper[5002]: I0930 12:54:15.191574 5002 scope.go:117] "RemoveContainer" containerID="d86f7870da0994e47ff1586f44a50a5604e1a05fd4479a9299eb9b69a64f3a39" Sep 30 12:54:15 crc kubenswrapper[5002]: E0930 12:54:15.192222 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d86f7870da0994e47ff1586f44a50a5604e1a05fd4479a9299eb9b69a64f3a39\": container with ID starting with d86f7870da0994e47ff1586f44a50a5604e1a05fd4479a9299eb9b69a64f3a39 not found: ID does not exist" containerID="d86f7870da0994e47ff1586f44a50a5604e1a05fd4479a9299eb9b69a64f3a39" Sep 30 12:54:15 crc kubenswrapper[5002]: I0930 12:54:15.192268 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d86f7870da0994e47ff1586f44a50a5604e1a05fd4479a9299eb9b69a64f3a39"} err="failed to get container status \"d86f7870da0994e47ff1586f44a50a5604e1a05fd4479a9299eb9b69a64f3a39\": rpc error: code = NotFound desc = could not find container \"d86f7870da0994e47ff1586f44a50a5604e1a05fd4479a9299eb9b69a64f3a39\": container with ID starting with d86f7870da0994e47ff1586f44a50a5604e1a05fd4479a9299eb9b69a64f3a39 not found: ID does not exist" Sep 30 12:54:15 crc kubenswrapper[5002]: I0930 12:54:15.192296 5002 scope.go:117] "RemoveContainer" containerID="be058d1bafe9bff579d3c2312d758b5ed94da75c849a94f4b318a8c83ee91a5f" Sep 30 12:54:15 crc kubenswrapper[5002]: E0930 12:54:15.192741 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"be058d1bafe9bff579d3c2312d758b5ed94da75c849a94f4b318a8c83ee91a5f\": container with ID starting with be058d1bafe9bff579d3c2312d758b5ed94da75c849a94f4b318a8c83ee91a5f not found: ID does not exist" containerID="be058d1bafe9bff579d3c2312d758b5ed94da75c849a94f4b318a8c83ee91a5f" Sep 30 12:54:15 crc kubenswrapper[5002]: I0930 12:54:15.192791 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"be058d1bafe9bff579d3c2312d758b5ed94da75c849a94f4b318a8c83ee91a5f"} err="failed to get container status \"be058d1bafe9bff579d3c2312d758b5ed94da75c849a94f4b318a8c83ee91a5f\": rpc error: code = NotFound desc = could not find container 
\"be058d1bafe9bff579d3c2312d758b5ed94da75c849a94f4b318a8c83ee91a5f\": container with ID starting with be058d1bafe9bff579d3c2312d758b5ed94da75c849a94f4b318a8c83ee91a5f not found: ID does not exist" Sep 30 12:54:15 crc kubenswrapper[5002]: I0930 12:54:15.192817 5002 scope.go:117] "RemoveContainer" containerID="2a381531ab2adeb46b3ddb41588ee92891159bda174867fd98e4909e6440f193" Sep 30 12:54:15 crc kubenswrapper[5002]: E0930 12:54:15.193284 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2a381531ab2adeb46b3ddb41588ee92891159bda174867fd98e4909e6440f193\": container with ID starting with 2a381531ab2adeb46b3ddb41588ee92891159bda174867fd98e4909e6440f193 not found: ID does not exist" containerID="2a381531ab2adeb46b3ddb41588ee92891159bda174867fd98e4909e6440f193" Sep 30 12:54:15 crc kubenswrapper[5002]: I0930 12:54:15.193325 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2a381531ab2adeb46b3ddb41588ee92891159bda174867fd98e4909e6440f193"} err="failed to get container status \"2a381531ab2adeb46b3ddb41588ee92891159bda174867fd98e4909e6440f193\": rpc error: code = NotFound desc = could not find container \"2a381531ab2adeb46b3ddb41588ee92891159bda174867fd98e4909e6440f193\": container with ID starting with 2a381531ab2adeb46b3ddb41588ee92891159bda174867fd98e4909e6440f193 not found: ID does not exist" Sep 30 12:54:16 crc kubenswrapper[5002]: I0930 12:54:16.695838 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e5942273-91c1-4cda-9619-63cee7b422b0" path="/var/lib/kubelet/pods/e5942273-91c1-4cda-9619-63cee7b422b0/volumes" Sep 30 12:55:27 crc kubenswrapper[5002]: I0930 12:55:27.570029 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-l5jr6"] Sep 30 12:55:27 crc kubenswrapper[5002]: E0930 12:55:27.571086 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e5942273-91c1-4cda-9619-63cee7b422b0" containerName="registry-server" Sep 30 12:55:27 crc kubenswrapper[5002]: I0930 12:55:27.571114 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="e5942273-91c1-4cda-9619-63cee7b422b0" containerName="registry-server" Sep 30 12:55:27 crc kubenswrapper[5002]: E0930 12:55:27.571129 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e5942273-91c1-4cda-9619-63cee7b422b0" containerName="extract-utilities" Sep 30 12:55:27 crc kubenswrapper[5002]: I0930 12:55:27.571136 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="e5942273-91c1-4cda-9619-63cee7b422b0" containerName="extract-utilities" Sep 30 12:55:27 crc kubenswrapper[5002]: E0930 12:55:27.571142 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e5942273-91c1-4cda-9619-63cee7b422b0" containerName="extract-content" Sep 30 12:55:27 crc kubenswrapper[5002]: I0930 12:55:27.571151 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="e5942273-91c1-4cda-9619-63cee7b422b0" containerName="extract-content" Sep 30 12:55:27 crc kubenswrapper[5002]: I0930 12:55:27.571385 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="e5942273-91c1-4cda-9619-63cee7b422b0" containerName="registry-server" Sep 30 12:55:27 crc kubenswrapper[5002]: I0930 12:55:27.573195 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-l5jr6" Sep 30 12:55:27 crc kubenswrapper[5002]: I0930 12:55:27.582183 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-l5jr6"] Sep 30 12:55:27 crc kubenswrapper[5002]: I0930 12:55:27.712346 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/add63184-f23e-409b-9a5c-3bd04411a22d-catalog-content\") pod \"community-operators-l5jr6\" (UID: \"add63184-f23e-409b-9a5c-3bd04411a22d\") " pod="openshift-marketplace/community-operators-l5jr6" Sep 30 12:55:27 crc kubenswrapper[5002]: I0930 12:55:27.712410 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kp45t\" (UniqueName: \"kubernetes.io/projected/add63184-f23e-409b-9a5c-3bd04411a22d-kube-api-access-kp45t\") pod \"community-operators-l5jr6\" (UID: \"add63184-f23e-409b-9a5c-3bd04411a22d\") " pod="openshift-marketplace/community-operators-l5jr6" Sep 30 12:55:27 crc kubenswrapper[5002]: I0930 12:55:27.712574 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/add63184-f23e-409b-9a5c-3bd04411a22d-utilities\") pod \"community-operators-l5jr6\" (UID: \"add63184-f23e-409b-9a5c-3bd04411a22d\") " pod="openshift-marketplace/community-operators-l5jr6" Sep 30 12:55:27 crc kubenswrapper[5002]: I0930 12:55:27.813769 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kp45t\" (UniqueName: \"kubernetes.io/projected/add63184-f23e-409b-9a5c-3bd04411a22d-kube-api-access-kp45t\") pod \"community-operators-l5jr6\" (UID: \"add63184-f23e-409b-9a5c-3bd04411a22d\") " pod="openshift-marketplace/community-operators-l5jr6" Sep 30 12:55:27 crc kubenswrapper[5002]: I0930 12:55:27.814859 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/add63184-f23e-409b-9a5c-3bd04411a22d-utilities\") pod \"community-operators-l5jr6\" (UID: \"add63184-f23e-409b-9a5c-3bd04411a22d\") " pod="openshift-marketplace/community-operators-l5jr6" Sep 30 12:55:27 crc kubenswrapper[5002]: I0930 12:55:27.815277 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/add63184-f23e-409b-9a5c-3bd04411a22d-catalog-content\") pod \"community-operators-l5jr6\" (UID: \"add63184-f23e-409b-9a5c-3bd04411a22d\") " pod="openshift-marketplace/community-operators-l5jr6" Sep 30 12:55:27 crc kubenswrapper[5002]: I0930 12:55:27.815450 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/add63184-f23e-409b-9a5c-3bd04411a22d-utilities\") pod \"community-operators-l5jr6\" (UID: \"add63184-f23e-409b-9a5c-3bd04411a22d\") " pod="openshift-marketplace/community-operators-l5jr6" Sep 30 12:55:27 crc kubenswrapper[5002]: I0930 12:55:27.816011 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/add63184-f23e-409b-9a5c-3bd04411a22d-catalog-content\") pod \"community-operators-l5jr6\" (UID: \"add63184-f23e-409b-9a5c-3bd04411a22d\") " pod="openshift-marketplace/community-operators-l5jr6" Sep 30 12:55:27 crc kubenswrapper[5002]: I0930 12:55:27.838796 5002 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-kp45t\" (UniqueName: \"kubernetes.io/projected/add63184-f23e-409b-9a5c-3bd04411a22d-kube-api-access-kp45t\") pod \"community-operators-l5jr6\" (UID: \"add63184-f23e-409b-9a5c-3bd04411a22d\") " pod="openshift-marketplace/community-operators-l5jr6" Sep 30 12:55:27 crc kubenswrapper[5002]: I0930 12:55:27.893532 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-l5jr6" Sep 30 12:55:28 crc kubenswrapper[5002]: I0930 12:55:28.428172 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-l5jr6"] Sep 30 12:55:28 crc kubenswrapper[5002]: I0930 12:55:28.721123 5002 generic.go:334] "Generic (PLEG): container finished" podID="add63184-f23e-409b-9a5c-3bd04411a22d" containerID="fdf54585e11920c74d7d01a5bb9ccf4b2f1e9883280d053d649200b724fc3918" exitCode=0 Sep 30 12:55:28 crc kubenswrapper[5002]: I0930 12:55:28.721300 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-l5jr6" event={"ID":"add63184-f23e-409b-9a5c-3bd04411a22d","Type":"ContainerDied","Data":"fdf54585e11920c74d7d01a5bb9ccf4b2f1e9883280d053d649200b724fc3918"} Sep 30 12:55:28 crc kubenswrapper[5002]: I0930 12:55:28.721365 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-l5jr6" event={"ID":"add63184-f23e-409b-9a5c-3bd04411a22d","Type":"ContainerStarted","Data":"650f0b4a7f3165d6ddda49a4688471bb07c8b97f656d6a6cf2b78a6d01061404"} Sep 30 12:55:29 crc kubenswrapper[5002]: I0930 12:55:29.739770 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-l5jr6" event={"ID":"add63184-f23e-409b-9a5c-3bd04411a22d","Type":"ContainerStarted","Data":"928f54ebd1847026db0272d2ae4261df3d607f58d6605024bdba85babf032a57"} Sep 30 12:55:30 crc kubenswrapper[5002]: I0930 12:55:30.753047 5002 generic.go:334] "Generic (PLEG): container finished" podID="add63184-f23e-409b-9a5c-3bd04411a22d" containerID="928f54ebd1847026db0272d2ae4261df3d607f58d6605024bdba85babf032a57" exitCode=0 Sep 30 12:55:30 crc kubenswrapper[5002]: I0930 12:55:30.753090 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-l5jr6" event={"ID":"add63184-f23e-409b-9a5c-3bd04411a22d","Type":"ContainerDied","Data":"928f54ebd1847026db0272d2ae4261df3d607f58d6605024bdba85babf032a57"} Sep 30 12:55:31 crc kubenswrapper[5002]: I0930 12:55:31.763698 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-l5jr6" event={"ID":"add63184-f23e-409b-9a5c-3bd04411a22d","Type":"ContainerStarted","Data":"27564bcba657d0824f80fd939f1d07cc77410e941de793188af45a6f1b2b6d47"} Sep 30 12:55:31 crc kubenswrapper[5002]: I0930 12:55:31.790363 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-l5jr6" podStartSLOduration=2.041386699 podStartE2EDuration="4.790345948s" podCreationTimestamp="2025-09-30 12:55:27 +0000 UTC" firstStartedPulling="2025-09-30 12:55:28.723249247 +0000 UTC m=+2102.972931393" lastFinishedPulling="2025-09-30 12:55:31.472208496 +0000 UTC m=+2105.721890642" observedRunningTime="2025-09-30 12:55:31.782512959 +0000 UTC m=+2106.032195115" watchObservedRunningTime="2025-09-30 12:55:31.790345948 +0000 UTC m=+2106.040028094" Sep 30 12:55:32 crc kubenswrapper[5002]: I0930 12:55:32.098061 5002 patch_prober.go:28] interesting 
Sep 30 12:55:32 crc kubenswrapper[5002]: I0930 12:55:32.098061 5002 patch_prober.go:28] interesting pod/machine-config-daemon-ncbb5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Sep 30 12:55:32 crc kubenswrapper[5002]: I0930 12:55:32.098120 5002 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" podUID="341a55c6-78d3-4fa2-8f47-b56fd41fa1c1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Sep 30 12:55:32 crc kubenswrapper[5002]: I0930 12:55:32.560948 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-j8fgp"]
Sep 30 12:55:32 crc kubenswrapper[5002]: I0930 12:55:32.573923 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-j8fgp"
Sep 30 12:55:32 crc kubenswrapper[5002]: I0930 12:55:32.583094 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-j8fgp"]
Sep 30 12:55:32 crc kubenswrapper[5002]: I0930 12:55:32.705661 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/319e27ce-2241-42de-90a6-e551d89bd632-catalog-content\") pod \"redhat-marketplace-j8fgp\" (UID: \"319e27ce-2241-42de-90a6-e551d89bd632\") " pod="openshift-marketplace/redhat-marketplace-j8fgp"
Sep 30 12:55:32 crc kubenswrapper[5002]: I0930 12:55:32.705844 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/319e27ce-2241-42de-90a6-e551d89bd632-utilities\") pod \"redhat-marketplace-j8fgp\" (UID: \"319e27ce-2241-42de-90a6-e551d89bd632\") " pod="openshift-marketplace/redhat-marketplace-j8fgp"
Sep 30 12:55:32 crc kubenswrapper[5002]: I0930 12:55:32.705879 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wp58z\" (UniqueName: \"kubernetes.io/projected/319e27ce-2241-42de-90a6-e551d89bd632-kube-api-access-wp58z\") pod \"redhat-marketplace-j8fgp\" (UID: \"319e27ce-2241-42de-90a6-e551d89bd632\") " pod="openshift-marketplace/redhat-marketplace-j8fgp"
Sep 30 12:55:32 crc kubenswrapper[5002]: I0930 12:55:32.807822 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/319e27ce-2241-42de-90a6-e551d89bd632-utilities\") pod \"redhat-marketplace-j8fgp\" (UID: \"319e27ce-2241-42de-90a6-e551d89bd632\") " pod="openshift-marketplace/redhat-marketplace-j8fgp"
Sep 30 12:55:32 crc kubenswrapper[5002]: I0930 12:55:32.807880 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wp58z\" (UniqueName: \"kubernetes.io/projected/319e27ce-2241-42de-90a6-e551d89bd632-kube-api-access-wp58z\") pod \"redhat-marketplace-j8fgp\" (UID: \"319e27ce-2241-42de-90a6-e551d89bd632\") " pod="openshift-marketplace/redhat-marketplace-j8fgp"
Sep 30 12:55:32 crc kubenswrapper[5002]: I0930 12:55:32.807999 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/319e27ce-2241-42de-90a6-e551d89bd632-catalog-content\") pod \"redhat-marketplace-j8fgp\" (UID: \"319e27ce-2241-42de-90a6-e551d89bd632\") " pod="openshift-marketplace/redhat-marketplace-j8fgp"
Sep 30 12:55:32 crc kubenswrapper[5002]: I0930 12:55:32.808425 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/319e27ce-2241-42de-90a6-e551d89bd632-catalog-content\") pod \"redhat-marketplace-j8fgp\" (UID: \"319e27ce-2241-42de-90a6-e551d89bd632\") " pod="openshift-marketplace/redhat-marketplace-j8fgp"
Sep 30 12:55:32 crc kubenswrapper[5002]: I0930 12:55:32.808666 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/319e27ce-2241-42de-90a6-e551d89bd632-utilities\") pod \"redhat-marketplace-j8fgp\" (UID: \"319e27ce-2241-42de-90a6-e551d89bd632\") " pod="openshift-marketplace/redhat-marketplace-j8fgp"
Sep 30 12:55:32 crc kubenswrapper[5002]: I0930 12:55:32.827995 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wp58z\" (UniqueName: \"kubernetes.io/projected/319e27ce-2241-42de-90a6-e551d89bd632-kube-api-access-wp58z\") pod \"redhat-marketplace-j8fgp\" (UID: \"319e27ce-2241-42de-90a6-e551d89bd632\") " pod="openshift-marketplace/redhat-marketplace-j8fgp"
Sep 30 12:55:32 crc kubenswrapper[5002]: I0930 12:55:32.897269 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-j8fgp"
Sep 30 12:55:33 crc kubenswrapper[5002]: I0930 12:55:33.342496 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-j8fgp"]
Sep 30 12:55:33 crc kubenswrapper[5002]: W0930 12:55:33.345552 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod319e27ce_2241_42de_90a6_e551d89bd632.slice/crio-67f1760d8c419da499f58465d6f249cbb3f4c7dd71cd6f5d65b1ac1201b32fc0 WatchSource:0}: Error finding container 67f1760d8c419da499f58465d6f249cbb3f4c7dd71cd6f5d65b1ac1201b32fc0: Status 404 returned error can't find the container with id 67f1760d8c419da499f58465d6f249cbb3f4c7dd71cd6f5d65b1ac1201b32fc0
Sep 30 12:55:33 crc kubenswrapper[5002]: I0930 12:55:33.784683 5002 generic.go:334] "Generic (PLEG): container finished" podID="319e27ce-2241-42de-90a6-e551d89bd632" containerID="957f96d8ef21b35f76b0050691c3d668f42bc813dadb5ba37c242319b8dabee6" exitCode=0
Sep 30 12:55:33 crc kubenswrapper[5002]: I0930 12:55:33.784759 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-j8fgp" event={"ID":"319e27ce-2241-42de-90a6-e551d89bd632","Type":"ContainerDied","Data":"957f96d8ef21b35f76b0050691c3d668f42bc813dadb5ba37c242319b8dabee6"}
Sep 30 12:55:33 crc kubenswrapper[5002]: I0930 12:55:33.785270 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-j8fgp" event={"ID":"319e27ce-2241-42de-90a6-e551d89bd632","Type":"ContainerStarted","Data":"67f1760d8c419da499f58465d6f249cbb3f4c7dd71cd6f5d65b1ac1201b32fc0"}
Sep 30 12:55:34 crc kubenswrapper[5002]: I0930 12:55:34.796212 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-j8fgp" event={"ID":"319e27ce-2241-42de-90a6-e551d89bd632","Type":"ContainerStarted","Data":"f4e38da9a113ee8c7377f1e46c1edc9b857c9efe0fcebce9890ad958e84e3ac0"}
containerID="f4e38da9a113ee8c7377f1e46c1edc9b857c9efe0fcebce9890ad958e84e3ac0" exitCode=0 Sep 30 12:55:35 crc kubenswrapper[5002]: I0930 12:55:35.807425 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-j8fgp" event={"ID":"319e27ce-2241-42de-90a6-e551d89bd632","Type":"ContainerDied","Data":"f4e38da9a113ee8c7377f1e46c1edc9b857c9efe0fcebce9890ad958e84e3ac0"} Sep 30 12:55:37 crc kubenswrapper[5002]: I0930 12:55:37.833682 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-j8fgp" event={"ID":"319e27ce-2241-42de-90a6-e551d89bd632","Type":"ContainerStarted","Data":"779cc27cca489f0e78ca7de9b0a803e86b6be7f2740d3ef76e2ebb8d47aa7c1a"} Sep 30 12:55:37 crc kubenswrapper[5002]: I0930 12:55:37.856164 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-j8fgp" podStartSLOduration=2.817588406 podStartE2EDuration="5.856144582s" podCreationTimestamp="2025-09-30 12:55:32 +0000 UTC" firstStartedPulling="2025-09-30 12:55:33.786356612 +0000 UTC m=+2108.036038758" lastFinishedPulling="2025-09-30 12:55:36.824912778 +0000 UTC m=+2111.074594934" observedRunningTime="2025-09-30 12:55:37.850536299 +0000 UTC m=+2112.100218445" watchObservedRunningTime="2025-09-30 12:55:37.856144582 +0000 UTC m=+2112.105826728" Sep 30 12:55:37 crc kubenswrapper[5002]: I0930 12:55:37.894598 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-l5jr6" Sep 30 12:55:37 crc kubenswrapper[5002]: I0930 12:55:37.894872 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-l5jr6" Sep 30 12:55:37 crc kubenswrapper[5002]: I0930 12:55:37.943330 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-l5jr6" Sep 30 12:55:38 crc kubenswrapper[5002]: I0930 12:55:38.343936 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-4t5fp"] Sep 30 12:55:38 crc kubenswrapper[5002]: I0930 12:55:38.346330 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-4t5fp" Sep 30 12:55:38 crc kubenswrapper[5002]: I0930 12:55:38.353566 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-4t5fp"] Sep 30 12:55:38 crc kubenswrapper[5002]: I0930 12:55:38.411379 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3ef31068-5bb0-498d-8d1e-6ff6c7ca2541-utilities\") pod \"certified-operators-4t5fp\" (UID: \"3ef31068-5bb0-498d-8d1e-6ff6c7ca2541\") " pod="openshift-marketplace/certified-operators-4t5fp" Sep 30 12:55:38 crc kubenswrapper[5002]: I0930 12:55:38.411766 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3ef31068-5bb0-498d-8d1e-6ff6c7ca2541-catalog-content\") pod \"certified-operators-4t5fp\" (UID: \"3ef31068-5bb0-498d-8d1e-6ff6c7ca2541\") " pod="openshift-marketplace/certified-operators-4t5fp" Sep 30 12:55:38 crc kubenswrapper[5002]: I0930 12:55:38.411860 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-28pkt\" (UniqueName: \"kubernetes.io/projected/3ef31068-5bb0-498d-8d1e-6ff6c7ca2541-kube-api-access-28pkt\") pod \"certified-operators-4t5fp\" (UID: \"3ef31068-5bb0-498d-8d1e-6ff6c7ca2541\") " pod="openshift-marketplace/certified-operators-4t5fp" Sep 30 12:55:38 crc kubenswrapper[5002]: I0930 12:55:38.513758 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3ef31068-5bb0-498d-8d1e-6ff6c7ca2541-utilities\") pod \"certified-operators-4t5fp\" (UID: \"3ef31068-5bb0-498d-8d1e-6ff6c7ca2541\") " pod="openshift-marketplace/certified-operators-4t5fp" Sep 30 12:55:38 crc kubenswrapper[5002]: I0930 12:55:38.514048 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3ef31068-5bb0-498d-8d1e-6ff6c7ca2541-catalog-content\") pod \"certified-operators-4t5fp\" (UID: \"3ef31068-5bb0-498d-8d1e-6ff6c7ca2541\") " pod="openshift-marketplace/certified-operators-4t5fp" Sep 30 12:55:38 crc kubenswrapper[5002]: I0930 12:55:38.514208 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-28pkt\" (UniqueName: \"kubernetes.io/projected/3ef31068-5bb0-498d-8d1e-6ff6c7ca2541-kube-api-access-28pkt\") pod \"certified-operators-4t5fp\" (UID: \"3ef31068-5bb0-498d-8d1e-6ff6c7ca2541\") " pod="openshift-marketplace/certified-operators-4t5fp" Sep 30 12:55:38 crc kubenswrapper[5002]: I0930 12:55:38.514332 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3ef31068-5bb0-498d-8d1e-6ff6c7ca2541-utilities\") pod \"certified-operators-4t5fp\" (UID: \"3ef31068-5bb0-498d-8d1e-6ff6c7ca2541\") " pod="openshift-marketplace/certified-operators-4t5fp" Sep 30 12:55:38 crc kubenswrapper[5002]: I0930 12:55:38.514910 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3ef31068-5bb0-498d-8d1e-6ff6c7ca2541-catalog-content\") pod \"certified-operators-4t5fp\" (UID: \"3ef31068-5bb0-498d-8d1e-6ff6c7ca2541\") " pod="openshift-marketplace/certified-operators-4t5fp" Sep 30 12:55:38 crc kubenswrapper[5002]: I0930 12:55:38.537690 5002 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-28pkt\" (UniqueName: \"kubernetes.io/projected/3ef31068-5bb0-498d-8d1e-6ff6c7ca2541-kube-api-access-28pkt\") pod \"certified-operators-4t5fp\" (UID: \"3ef31068-5bb0-498d-8d1e-6ff6c7ca2541\") " pod="openshift-marketplace/certified-operators-4t5fp" Sep 30 12:55:38 crc kubenswrapper[5002]: I0930 12:55:38.673395 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-4t5fp" Sep 30 12:55:38 crc kubenswrapper[5002]: I0930 12:55:38.917742 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-l5jr6" Sep 30 12:55:38 crc kubenswrapper[5002]: I0930 12:55:38.981215 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-4t5fp"] Sep 30 12:55:38 crc kubenswrapper[5002]: W0930 12:55:38.988918 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3ef31068_5bb0_498d_8d1e_6ff6c7ca2541.slice/crio-258ebab86872a1a178e0be432461c1365cddcfa25e1dc111421a68563c5cd09f WatchSource:0}: Error finding container 258ebab86872a1a178e0be432461c1365cddcfa25e1dc111421a68563c5cd09f: Status 404 returned error can't find the container with id 258ebab86872a1a178e0be432461c1365cddcfa25e1dc111421a68563c5cd09f Sep 30 12:55:39 crc kubenswrapper[5002]: I0930 12:55:39.860245 5002 generic.go:334] "Generic (PLEG): container finished" podID="3ef31068-5bb0-498d-8d1e-6ff6c7ca2541" containerID="3653664761d4eed18f30e39cec7bcea5408cef3331c53302d8cd1ab1466fdfd8" exitCode=0 Sep 30 12:55:39 crc kubenswrapper[5002]: I0930 12:55:39.860617 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4t5fp" event={"ID":"3ef31068-5bb0-498d-8d1e-6ff6c7ca2541","Type":"ContainerDied","Data":"3653664761d4eed18f30e39cec7bcea5408cef3331c53302d8cd1ab1466fdfd8"} Sep 30 12:55:39 crc kubenswrapper[5002]: I0930 12:55:39.860659 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4t5fp" event={"ID":"3ef31068-5bb0-498d-8d1e-6ff6c7ca2541","Type":"ContainerStarted","Data":"258ebab86872a1a178e0be432461c1365cddcfa25e1dc111421a68563c5cd09f"} Sep 30 12:55:40 crc kubenswrapper[5002]: I0930 12:55:40.743735 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-l5jr6"] Sep 30 12:55:40 crc kubenswrapper[5002]: I0930 12:55:40.872383 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4t5fp" event={"ID":"3ef31068-5bb0-498d-8d1e-6ff6c7ca2541","Type":"ContainerStarted","Data":"5ce6f63db40384e402319345e561b085d85c8a6cf15026d119ca2ddffe44797f"} Sep 30 12:55:41 crc kubenswrapper[5002]: I0930 12:55:41.881773 5002 generic.go:334] "Generic (PLEG): container finished" podID="3ef31068-5bb0-498d-8d1e-6ff6c7ca2541" containerID="5ce6f63db40384e402319345e561b085d85c8a6cf15026d119ca2ddffe44797f" exitCode=0 Sep 30 12:55:41 crc kubenswrapper[5002]: I0930 12:55:41.881853 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4t5fp" event={"ID":"3ef31068-5bb0-498d-8d1e-6ff6c7ca2541","Type":"ContainerDied","Data":"5ce6f63db40384e402319345e561b085d85c8a6cf15026d119ca2ddffe44797f"} Sep 30 12:55:41 crc kubenswrapper[5002]: I0930 12:55:41.882265 5002 kuberuntime_container.go:808] "Killing container with a grace period" 
pod="openshift-marketplace/community-operators-l5jr6" podUID="add63184-f23e-409b-9a5c-3bd04411a22d" containerName="registry-server" containerID="cri-o://27564bcba657d0824f80fd939f1d07cc77410e941de793188af45a6f1b2b6d47" gracePeriod=2 Sep 30 12:55:42 crc kubenswrapper[5002]: I0930 12:55:42.345856 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-l5jr6" Sep 30 12:55:42 crc kubenswrapper[5002]: I0930 12:55:42.497020 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kp45t\" (UniqueName: \"kubernetes.io/projected/add63184-f23e-409b-9a5c-3bd04411a22d-kube-api-access-kp45t\") pod \"add63184-f23e-409b-9a5c-3bd04411a22d\" (UID: \"add63184-f23e-409b-9a5c-3bd04411a22d\") " Sep 30 12:55:42 crc kubenswrapper[5002]: I0930 12:55:42.497354 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/add63184-f23e-409b-9a5c-3bd04411a22d-catalog-content\") pod \"add63184-f23e-409b-9a5c-3bd04411a22d\" (UID: \"add63184-f23e-409b-9a5c-3bd04411a22d\") " Sep 30 12:55:42 crc kubenswrapper[5002]: I0930 12:55:42.497452 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/add63184-f23e-409b-9a5c-3bd04411a22d-utilities\") pod \"add63184-f23e-409b-9a5c-3bd04411a22d\" (UID: \"add63184-f23e-409b-9a5c-3bd04411a22d\") " Sep 30 12:55:42 crc kubenswrapper[5002]: I0930 12:55:42.498628 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/add63184-f23e-409b-9a5c-3bd04411a22d-utilities" (OuterVolumeSpecName: "utilities") pod "add63184-f23e-409b-9a5c-3bd04411a22d" (UID: "add63184-f23e-409b-9a5c-3bd04411a22d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 12:55:42 crc kubenswrapper[5002]: I0930 12:55:42.505312 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/add63184-f23e-409b-9a5c-3bd04411a22d-kube-api-access-kp45t" (OuterVolumeSpecName: "kube-api-access-kp45t") pod "add63184-f23e-409b-9a5c-3bd04411a22d" (UID: "add63184-f23e-409b-9a5c-3bd04411a22d"). InnerVolumeSpecName "kube-api-access-kp45t". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 12:55:42 crc kubenswrapper[5002]: I0930 12:55:42.547667 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/add63184-f23e-409b-9a5c-3bd04411a22d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "add63184-f23e-409b-9a5c-3bd04411a22d" (UID: "add63184-f23e-409b-9a5c-3bd04411a22d"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 12:55:42 crc kubenswrapper[5002]: I0930 12:55:42.600522 5002 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/add63184-f23e-409b-9a5c-3bd04411a22d-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 12:55:42 crc kubenswrapper[5002]: I0930 12:55:42.600561 5002 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/add63184-f23e-409b-9a5c-3bd04411a22d-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 12:55:42 crc kubenswrapper[5002]: I0930 12:55:42.600574 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kp45t\" (UniqueName: \"kubernetes.io/projected/add63184-f23e-409b-9a5c-3bd04411a22d-kube-api-access-kp45t\") on node \"crc\" DevicePath \"\"" Sep 30 12:55:42 crc kubenswrapper[5002]: I0930 12:55:42.894971 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4t5fp" event={"ID":"3ef31068-5bb0-498d-8d1e-6ff6c7ca2541","Type":"ContainerStarted","Data":"abb6900d289202ade25969bdb671915bec42deb7d0b6f18b7e11dbebaac50c99"} Sep 30 12:55:42 crc kubenswrapper[5002]: I0930 12:55:42.897519 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-j8fgp" Sep 30 12:55:42 crc kubenswrapper[5002]: I0930 12:55:42.897758 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-j8fgp" Sep 30 12:55:42 crc kubenswrapper[5002]: I0930 12:55:42.898723 5002 generic.go:334] "Generic (PLEG): container finished" podID="add63184-f23e-409b-9a5c-3bd04411a22d" containerID="27564bcba657d0824f80fd939f1d07cc77410e941de793188af45a6f1b2b6d47" exitCode=0 Sep 30 12:55:42 crc kubenswrapper[5002]: I0930 12:55:42.898761 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-l5jr6" event={"ID":"add63184-f23e-409b-9a5c-3bd04411a22d","Type":"ContainerDied","Data":"27564bcba657d0824f80fd939f1d07cc77410e941de793188af45a6f1b2b6d47"} Sep 30 12:55:42 crc kubenswrapper[5002]: I0930 12:55:42.898783 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-l5jr6" event={"ID":"add63184-f23e-409b-9a5c-3bd04411a22d","Type":"ContainerDied","Data":"650f0b4a7f3165d6ddda49a4688471bb07c8b97f656d6a6cf2b78a6d01061404"} Sep 30 12:55:42 crc kubenswrapper[5002]: I0930 12:55:42.898791 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-l5jr6" Sep 30 12:55:42 crc kubenswrapper[5002]: I0930 12:55:42.898800 5002 scope.go:117] "RemoveContainer" containerID="27564bcba657d0824f80fd939f1d07cc77410e941de793188af45a6f1b2b6d47" Sep 30 12:55:42 crc kubenswrapper[5002]: I0930 12:55:42.922061 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-4t5fp" podStartSLOduration=2.4321110040000002 podStartE2EDuration="4.922038276s" podCreationTimestamp="2025-09-30 12:55:38 +0000 UTC" firstStartedPulling="2025-09-30 12:55:39.863859836 +0000 UTC m=+2114.113542002" lastFinishedPulling="2025-09-30 12:55:42.353787128 +0000 UTC m=+2116.603469274" observedRunningTime="2025-09-30 12:55:42.910956007 +0000 UTC m=+2117.160638173" watchObservedRunningTime="2025-09-30 12:55:42.922038276 +0000 UTC m=+2117.171720422" Sep 30 12:55:42 crc kubenswrapper[5002]: I0930 12:55:42.934913 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-l5jr6"] Sep 30 12:55:42 crc kubenswrapper[5002]: I0930 12:55:42.934953 5002 scope.go:117] "RemoveContainer" containerID="928f54ebd1847026db0272d2ae4261df3d607f58d6605024bdba85babf032a57" Sep 30 12:55:42 crc kubenswrapper[5002]: I0930 12:55:42.941504 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-l5jr6"] Sep 30 12:55:42 crc kubenswrapper[5002]: I0930 12:55:42.957864 5002 scope.go:117] "RemoveContainer" containerID="fdf54585e11920c74d7d01a5bb9ccf4b2f1e9883280d053d649200b724fc3918" Sep 30 12:55:42 crc kubenswrapper[5002]: I0930 12:55:42.959461 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-j8fgp" Sep 30 12:55:42 crc kubenswrapper[5002]: I0930 12:55:42.979927 5002 scope.go:117] "RemoveContainer" containerID="27564bcba657d0824f80fd939f1d07cc77410e941de793188af45a6f1b2b6d47" Sep 30 12:55:42 crc kubenswrapper[5002]: E0930 12:55:42.980528 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"27564bcba657d0824f80fd939f1d07cc77410e941de793188af45a6f1b2b6d47\": container with ID starting with 27564bcba657d0824f80fd939f1d07cc77410e941de793188af45a6f1b2b6d47 not found: ID does not exist" containerID="27564bcba657d0824f80fd939f1d07cc77410e941de793188af45a6f1b2b6d47" Sep 30 12:55:42 crc kubenswrapper[5002]: I0930 12:55:42.980583 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"27564bcba657d0824f80fd939f1d07cc77410e941de793188af45a6f1b2b6d47"} err="failed to get container status \"27564bcba657d0824f80fd939f1d07cc77410e941de793188af45a6f1b2b6d47\": rpc error: code = NotFound desc = could not find container \"27564bcba657d0824f80fd939f1d07cc77410e941de793188af45a6f1b2b6d47\": container with ID starting with 27564bcba657d0824f80fd939f1d07cc77410e941de793188af45a6f1b2b6d47 not found: ID does not exist" Sep 30 12:55:42 crc kubenswrapper[5002]: I0930 12:55:42.980621 5002 scope.go:117] "RemoveContainer" containerID="928f54ebd1847026db0272d2ae4261df3d607f58d6605024bdba85babf032a57" Sep 30 12:55:42 crc kubenswrapper[5002]: E0930 12:55:42.980958 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"928f54ebd1847026db0272d2ae4261df3d607f58d6605024bdba85babf032a57\": container with ID starting with 
928f54ebd1847026db0272d2ae4261df3d607f58d6605024bdba85babf032a57 not found: ID does not exist" containerID="928f54ebd1847026db0272d2ae4261df3d607f58d6605024bdba85babf032a57" Sep 30 12:55:42 crc kubenswrapper[5002]: I0930 12:55:42.980990 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"928f54ebd1847026db0272d2ae4261df3d607f58d6605024bdba85babf032a57"} err="failed to get container status \"928f54ebd1847026db0272d2ae4261df3d607f58d6605024bdba85babf032a57\": rpc error: code = NotFound desc = could not find container \"928f54ebd1847026db0272d2ae4261df3d607f58d6605024bdba85babf032a57\": container with ID starting with 928f54ebd1847026db0272d2ae4261df3d607f58d6605024bdba85babf032a57 not found: ID does not exist" Sep 30 12:55:42 crc kubenswrapper[5002]: I0930 12:55:42.981012 5002 scope.go:117] "RemoveContainer" containerID="fdf54585e11920c74d7d01a5bb9ccf4b2f1e9883280d053d649200b724fc3918" Sep 30 12:55:42 crc kubenswrapper[5002]: E0930 12:55:42.981317 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fdf54585e11920c74d7d01a5bb9ccf4b2f1e9883280d053d649200b724fc3918\": container with ID starting with fdf54585e11920c74d7d01a5bb9ccf4b2f1e9883280d053d649200b724fc3918 not found: ID does not exist" containerID="fdf54585e11920c74d7d01a5bb9ccf4b2f1e9883280d053d649200b724fc3918" Sep 30 12:55:42 crc kubenswrapper[5002]: I0930 12:55:42.981354 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fdf54585e11920c74d7d01a5bb9ccf4b2f1e9883280d053d649200b724fc3918"} err="failed to get container status \"fdf54585e11920c74d7d01a5bb9ccf4b2f1e9883280d053d649200b724fc3918\": rpc error: code = NotFound desc = could not find container \"fdf54585e11920c74d7d01a5bb9ccf4b2f1e9883280d053d649200b724fc3918\": container with ID starting with fdf54585e11920c74d7d01a5bb9ccf4b2f1e9883280d053d649200b724fc3918 not found: ID does not exist" Sep 30 12:55:43 crc kubenswrapper[5002]: I0930 12:55:43.977840 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-j8fgp" Sep 30 12:55:44 crc kubenswrapper[5002]: I0930 12:55:44.691192 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="add63184-f23e-409b-9a5c-3bd04411a22d" path="/var/lib/kubelet/pods/add63184-f23e-409b-9a5c-3bd04411a22d/volumes" Sep 30 12:55:46 crc kubenswrapper[5002]: I0930 12:55:46.134401 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-j8fgp"] Sep 30 12:55:46 crc kubenswrapper[5002]: I0930 12:55:46.946716 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-j8fgp" podUID="319e27ce-2241-42de-90a6-e551d89bd632" containerName="registry-server" containerID="cri-o://779cc27cca489f0e78ca7de9b0a803e86b6be7f2740d3ef76e2ebb8d47aa7c1a" gracePeriod=2 Sep 30 12:55:47 crc kubenswrapper[5002]: I0930 12:55:47.410130 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-j8fgp" Sep 30 12:55:47 crc kubenswrapper[5002]: I0930 12:55:47.493456 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wp58z\" (UniqueName: \"kubernetes.io/projected/319e27ce-2241-42de-90a6-e551d89bd632-kube-api-access-wp58z\") pod \"319e27ce-2241-42de-90a6-e551d89bd632\" (UID: \"319e27ce-2241-42de-90a6-e551d89bd632\") " Sep 30 12:55:47 crc kubenswrapper[5002]: I0930 12:55:47.493646 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/319e27ce-2241-42de-90a6-e551d89bd632-catalog-content\") pod \"319e27ce-2241-42de-90a6-e551d89bd632\" (UID: \"319e27ce-2241-42de-90a6-e551d89bd632\") " Sep 30 12:55:47 crc kubenswrapper[5002]: I0930 12:55:47.493749 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/319e27ce-2241-42de-90a6-e551d89bd632-utilities\") pod \"319e27ce-2241-42de-90a6-e551d89bd632\" (UID: \"319e27ce-2241-42de-90a6-e551d89bd632\") " Sep 30 12:55:47 crc kubenswrapper[5002]: I0930 12:55:47.495098 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/319e27ce-2241-42de-90a6-e551d89bd632-utilities" (OuterVolumeSpecName: "utilities") pod "319e27ce-2241-42de-90a6-e551d89bd632" (UID: "319e27ce-2241-42de-90a6-e551d89bd632"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 12:55:47 crc kubenswrapper[5002]: I0930 12:55:47.498874 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/319e27ce-2241-42de-90a6-e551d89bd632-kube-api-access-wp58z" (OuterVolumeSpecName: "kube-api-access-wp58z") pod "319e27ce-2241-42de-90a6-e551d89bd632" (UID: "319e27ce-2241-42de-90a6-e551d89bd632"). InnerVolumeSpecName "kube-api-access-wp58z". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 12:55:47 crc kubenswrapper[5002]: I0930 12:55:47.511606 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/319e27ce-2241-42de-90a6-e551d89bd632-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "319e27ce-2241-42de-90a6-e551d89bd632" (UID: "319e27ce-2241-42de-90a6-e551d89bd632"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 12:55:47 crc kubenswrapper[5002]: I0930 12:55:47.595970 5002 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/319e27ce-2241-42de-90a6-e551d89bd632-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 12:55:47 crc kubenswrapper[5002]: I0930 12:55:47.596020 5002 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/319e27ce-2241-42de-90a6-e551d89bd632-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 12:55:47 crc kubenswrapper[5002]: I0930 12:55:47.596034 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wp58z\" (UniqueName: \"kubernetes.io/projected/319e27ce-2241-42de-90a6-e551d89bd632-kube-api-access-wp58z\") on node \"crc\" DevicePath \"\"" Sep 30 12:55:47 crc kubenswrapper[5002]: I0930 12:55:47.957574 5002 generic.go:334] "Generic (PLEG): container finished" podID="319e27ce-2241-42de-90a6-e551d89bd632" containerID="779cc27cca489f0e78ca7de9b0a803e86b6be7f2740d3ef76e2ebb8d47aa7c1a" exitCode=0 Sep 30 12:55:47 crc kubenswrapper[5002]: I0930 12:55:47.957616 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-j8fgp" event={"ID":"319e27ce-2241-42de-90a6-e551d89bd632","Type":"ContainerDied","Data":"779cc27cca489f0e78ca7de9b0a803e86b6be7f2740d3ef76e2ebb8d47aa7c1a"} Sep 30 12:55:47 crc kubenswrapper[5002]: I0930 12:55:47.957641 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-j8fgp" event={"ID":"319e27ce-2241-42de-90a6-e551d89bd632","Type":"ContainerDied","Data":"67f1760d8c419da499f58465d6f249cbb3f4c7dd71cd6f5d65b1ac1201b32fc0"} Sep 30 12:55:47 crc kubenswrapper[5002]: I0930 12:55:47.957658 5002 scope.go:117] "RemoveContainer" containerID="779cc27cca489f0e78ca7de9b0a803e86b6be7f2740d3ef76e2ebb8d47aa7c1a" Sep 30 12:55:47 crc kubenswrapper[5002]: I0930 12:55:47.957805 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-j8fgp" Sep 30 12:55:47 crc kubenswrapper[5002]: I0930 12:55:47.991336 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-j8fgp"] Sep 30 12:55:47 crc kubenswrapper[5002]: I0930 12:55:47.996705 5002 scope.go:117] "RemoveContainer" containerID="f4e38da9a113ee8c7377f1e46c1edc9b857c9efe0fcebce9890ad958e84e3ac0" Sep 30 12:55:47 crc kubenswrapper[5002]: I0930 12:55:47.998877 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-j8fgp"] Sep 30 12:55:48 crc kubenswrapper[5002]: I0930 12:55:48.020438 5002 scope.go:117] "RemoveContainer" containerID="957f96d8ef21b35f76b0050691c3d668f42bc813dadb5ba37c242319b8dabee6" Sep 30 12:55:48 crc kubenswrapper[5002]: I0930 12:55:48.059976 5002 scope.go:117] "RemoveContainer" containerID="779cc27cca489f0e78ca7de9b0a803e86b6be7f2740d3ef76e2ebb8d47aa7c1a" Sep 30 12:55:48 crc kubenswrapper[5002]: E0930 12:55:48.060332 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"779cc27cca489f0e78ca7de9b0a803e86b6be7f2740d3ef76e2ebb8d47aa7c1a\": container with ID starting with 779cc27cca489f0e78ca7de9b0a803e86b6be7f2740d3ef76e2ebb8d47aa7c1a not found: ID does not exist" containerID="779cc27cca489f0e78ca7de9b0a803e86b6be7f2740d3ef76e2ebb8d47aa7c1a" Sep 30 12:55:48 crc kubenswrapper[5002]: I0930 12:55:48.060378 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"779cc27cca489f0e78ca7de9b0a803e86b6be7f2740d3ef76e2ebb8d47aa7c1a"} err="failed to get container status \"779cc27cca489f0e78ca7de9b0a803e86b6be7f2740d3ef76e2ebb8d47aa7c1a\": rpc error: code = NotFound desc = could not find container \"779cc27cca489f0e78ca7de9b0a803e86b6be7f2740d3ef76e2ebb8d47aa7c1a\": container with ID starting with 779cc27cca489f0e78ca7de9b0a803e86b6be7f2740d3ef76e2ebb8d47aa7c1a not found: ID does not exist" Sep 30 12:55:48 crc kubenswrapper[5002]: I0930 12:55:48.060406 5002 scope.go:117] "RemoveContainer" containerID="f4e38da9a113ee8c7377f1e46c1edc9b857c9efe0fcebce9890ad958e84e3ac0" Sep 30 12:55:48 crc kubenswrapper[5002]: E0930 12:55:48.060842 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f4e38da9a113ee8c7377f1e46c1edc9b857c9efe0fcebce9890ad958e84e3ac0\": container with ID starting with f4e38da9a113ee8c7377f1e46c1edc9b857c9efe0fcebce9890ad958e84e3ac0 not found: ID does not exist" containerID="f4e38da9a113ee8c7377f1e46c1edc9b857c9efe0fcebce9890ad958e84e3ac0" Sep 30 12:55:48 crc kubenswrapper[5002]: I0930 12:55:48.060901 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f4e38da9a113ee8c7377f1e46c1edc9b857c9efe0fcebce9890ad958e84e3ac0"} err="failed to get container status \"f4e38da9a113ee8c7377f1e46c1edc9b857c9efe0fcebce9890ad958e84e3ac0\": rpc error: code = NotFound desc = could not find container \"f4e38da9a113ee8c7377f1e46c1edc9b857c9efe0fcebce9890ad958e84e3ac0\": container with ID starting with f4e38da9a113ee8c7377f1e46c1edc9b857c9efe0fcebce9890ad958e84e3ac0 not found: ID does not exist" Sep 30 12:55:48 crc kubenswrapper[5002]: I0930 12:55:48.060933 5002 scope.go:117] "RemoveContainer" containerID="957f96d8ef21b35f76b0050691c3d668f42bc813dadb5ba37c242319b8dabee6" Sep 30 12:55:48 crc kubenswrapper[5002]: E0930 12:55:48.061281 5002 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"957f96d8ef21b35f76b0050691c3d668f42bc813dadb5ba37c242319b8dabee6\": container with ID starting with 957f96d8ef21b35f76b0050691c3d668f42bc813dadb5ba37c242319b8dabee6 not found: ID does not exist" containerID="957f96d8ef21b35f76b0050691c3d668f42bc813dadb5ba37c242319b8dabee6" Sep 30 12:55:48 crc kubenswrapper[5002]: I0930 12:55:48.061315 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"957f96d8ef21b35f76b0050691c3d668f42bc813dadb5ba37c242319b8dabee6"} err="failed to get container status \"957f96d8ef21b35f76b0050691c3d668f42bc813dadb5ba37c242319b8dabee6\": rpc error: code = NotFound desc = could not find container \"957f96d8ef21b35f76b0050691c3d668f42bc813dadb5ba37c242319b8dabee6\": container with ID starting with 957f96d8ef21b35f76b0050691c3d668f42bc813dadb5ba37c242319b8dabee6 not found: ID does not exist" Sep 30 12:55:48 crc kubenswrapper[5002]: I0930 12:55:48.674253 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-4t5fp" Sep 30 12:55:48 crc kubenswrapper[5002]: I0930 12:55:48.674535 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-4t5fp" Sep 30 12:55:48 crc kubenswrapper[5002]: I0930 12:55:48.690233 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="319e27ce-2241-42de-90a6-e551d89bd632" path="/var/lib/kubelet/pods/319e27ce-2241-42de-90a6-e551d89bd632/volumes" Sep 30 12:55:48 crc kubenswrapper[5002]: I0930 12:55:48.732749 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-4t5fp" Sep 30 12:55:49 crc kubenswrapper[5002]: I0930 12:55:49.021849 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-4t5fp" Sep 30 12:55:50 crc kubenswrapper[5002]: I0930 12:55:50.740692 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-4t5fp"] Sep 30 12:55:51 crc kubenswrapper[5002]: I0930 12:55:51.990577 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-4t5fp" podUID="3ef31068-5bb0-498d-8d1e-6ff6c7ca2541" containerName="registry-server" containerID="cri-o://abb6900d289202ade25969bdb671915bec42deb7d0b6f18b7e11dbebaac50c99" gracePeriod=2 Sep 30 12:55:53 crc kubenswrapper[5002]: I0930 12:55:53.001717 5002 generic.go:334] "Generic (PLEG): container finished" podID="3ef31068-5bb0-498d-8d1e-6ff6c7ca2541" containerID="abb6900d289202ade25969bdb671915bec42deb7d0b6f18b7e11dbebaac50c99" exitCode=0 Sep 30 12:55:53 crc kubenswrapper[5002]: I0930 12:55:53.001779 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4t5fp" event={"ID":"3ef31068-5bb0-498d-8d1e-6ff6c7ca2541","Type":"ContainerDied","Data":"abb6900d289202ade25969bdb671915bec42deb7d0b6f18b7e11dbebaac50c99"} Sep 30 12:55:53 crc kubenswrapper[5002]: I0930 12:55:53.002258 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4t5fp" event={"ID":"3ef31068-5bb0-498d-8d1e-6ff6c7ca2541","Type":"ContainerDied","Data":"258ebab86872a1a178e0be432461c1365cddcfa25e1dc111421a68563c5cd09f"} Sep 30 12:55:53 crc kubenswrapper[5002]: I0930 12:55:53.002272 5002 pod_container_deletor.go:80] "Container not found in pod's containers" 
containerID="258ebab86872a1a178e0be432461c1365cddcfa25e1dc111421a68563c5cd09f" Sep 30 12:55:53 crc kubenswrapper[5002]: I0930 12:55:53.054768 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-4t5fp" Sep 30 12:55:53 crc kubenswrapper[5002]: I0930 12:55:53.197168 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3ef31068-5bb0-498d-8d1e-6ff6c7ca2541-utilities\") pod \"3ef31068-5bb0-498d-8d1e-6ff6c7ca2541\" (UID: \"3ef31068-5bb0-498d-8d1e-6ff6c7ca2541\") " Sep 30 12:55:53 crc kubenswrapper[5002]: I0930 12:55:53.197268 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-28pkt\" (UniqueName: \"kubernetes.io/projected/3ef31068-5bb0-498d-8d1e-6ff6c7ca2541-kube-api-access-28pkt\") pod \"3ef31068-5bb0-498d-8d1e-6ff6c7ca2541\" (UID: \"3ef31068-5bb0-498d-8d1e-6ff6c7ca2541\") " Sep 30 12:55:53 crc kubenswrapper[5002]: I0930 12:55:53.197407 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3ef31068-5bb0-498d-8d1e-6ff6c7ca2541-catalog-content\") pod \"3ef31068-5bb0-498d-8d1e-6ff6c7ca2541\" (UID: \"3ef31068-5bb0-498d-8d1e-6ff6c7ca2541\") " Sep 30 12:55:53 crc kubenswrapper[5002]: I0930 12:55:53.198093 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3ef31068-5bb0-498d-8d1e-6ff6c7ca2541-utilities" (OuterVolumeSpecName: "utilities") pod "3ef31068-5bb0-498d-8d1e-6ff6c7ca2541" (UID: "3ef31068-5bb0-498d-8d1e-6ff6c7ca2541"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 12:55:53 crc kubenswrapper[5002]: I0930 12:55:53.199131 5002 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3ef31068-5bb0-498d-8d1e-6ff6c7ca2541-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 12:55:53 crc kubenswrapper[5002]: I0930 12:55:53.203044 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3ef31068-5bb0-498d-8d1e-6ff6c7ca2541-kube-api-access-28pkt" (OuterVolumeSpecName: "kube-api-access-28pkt") pod "3ef31068-5bb0-498d-8d1e-6ff6c7ca2541" (UID: "3ef31068-5bb0-498d-8d1e-6ff6c7ca2541"). InnerVolumeSpecName "kube-api-access-28pkt". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 12:55:53 crc kubenswrapper[5002]: I0930 12:55:53.238808 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3ef31068-5bb0-498d-8d1e-6ff6c7ca2541-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "3ef31068-5bb0-498d-8d1e-6ff6c7ca2541" (UID: "3ef31068-5bb0-498d-8d1e-6ff6c7ca2541"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 12:55:53 crc kubenswrapper[5002]: I0930 12:55:53.301423 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-28pkt\" (UniqueName: \"kubernetes.io/projected/3ef31068-5bb0-498d-8d1e-6ff6c7ca2541-kube-api-access-28pkt\") on node \"crc\" DevicePath \"\"" Sep 30 12:55:53 crc kubenswrapper[5002]: I0930 12:55:53.301464 5002 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3ef31068-5bb0-498d-8d1e-6ff6c7ca2541-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 12:55:54 crc kubenswrapper[5002]: I0930 12:55:54.010708 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-4t5fp" Sep 30 12:55:54 crc kubenswrapper[5002]: I0930 12:55:54.048999 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-4t5fp"] Sep 30 12:55:54 crc kubenswrapper[5002]: I0930 12:55:54.057593 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-4t5fp"] Sep 30 12:55:54 crc kubenswrapper[5002]: I0930 12:55:54.690249 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3ef31068-5bb0-498d-8d1e-6ff6c7ca2541" path="/var/lib/kubelet/pods/3ef31068-5bb0-498d-8d1e-6ff6c7ca2541/volumes" Sep 30 12:56:02 crc kubenswrapper[5002]: I0930 12:56:02.098243 5002 patch_prober.go:28] interesting pod/machine-config-daemon-ncbb5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 12:56:02 crc kubenswrapper[5002]: I0930 12:56:02.098851 5002 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" podUID="341a55c6-78d3-4fa2-8f47-b56fd41fa1c1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 12:56:32 crc kubenswrapper[5002]: I0930 12:56:32.098379 5002 patch_prober.go:28] interesting pod/machine-config-daemon-ncbb5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 12:56:32 crc kubenswrapper[5002]: I0930 12:56:32.099146 5002 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" podUID="341a55c6-78d3-4fa2-8f47-b56fd41fa1c1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 12:56:32 crc kubenswrapper[5002]: I0930 12:56:32.099218 5002 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" Sep 30 12:56:32 crc kubenswrapper[5002]: I0930 12:56:32.100300 5002 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"c399d44e9bfc45838bcb7d385b7879008ca1a51ef33744b6ce2235833eb197e0"} pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 30 12:56:32 
Sep 30 12:56:32 crc kubenswrapper[5002]: I0930 12:56:32.100403 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" podUID="341a55c6-78d3-4fa2-8f47-b56fd41fa1c1" containerName="machine-config-daemon" containerID="cri-o://c399d44e9bfc45838bcb7d385b7879008ca1a51ef33744b6ce2235833eb197e0" gracePeriod=600
Sep 30 12:56:32 crc kubenswrapper[5002]: E0930 12:56:32.260130 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-ncbb5_openshift-machine-config-operator(341a55c6-78d3-4fa2-8f47-b56fd41fa1c1)\"" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" podUID="341a55c6-78d3-4fa2-8f47-b56fd41fa1c1"
Sep 30 12:56:32 crc kubenswrapper[5002]: I0930 12:56:32.350549 5002 generic.go:334] "Generic (PLEG): container finished" podID="341a55c6-78d3-4fa2-8f47-b56fd41fa1c1" containerID="c399d44e9bfc45838bcb7d385b7879008ca1a51ef33744b6ce2235833eb197e0" exitCode=0
Sep 30 12:56:32 crc kubenswrapper[5002]: I0930 12:56:32.350632 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" event={"ID":"341a55c6-78d3-4fa2-8f47-b56fd41fa1c1","Type":"ContainerDied","Data":"c399d44e9bfc45838bcb7d385b7879008ca1a51ef33744b6ce2235833eb197e0"}
Sep 30 12:56:32 crc kubenswrapper[5002]: I0930 12:56:32.351007 5002 scope.go:117] "RemoveContainer" containerID="83f28e70b07c52d080e9624494f8eff05394195f3007fb1fb9e6b51a22eef012"
Sep 30 12:56:32 crc kubenswrapper[5002]: I0930 12:56:32.351826 5002 scope.go:117] "RemoveContainer" containerID="c399d44e9bfc45838bcb7d385b7879008ca1a51ef33744b6ce2235833eb197e0"
Sep 30 12:56:32 crc kubenswrapper[5002]: E0930 12:56:32.352209 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-ncbb5_openshift-machine-config-operator(341a55c6-78d3-4fa2-8f47-b56fd41fa1c1)\"" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" podUID="341a55c6-78d3-4fa2-8f47-b56fd41fa1c1"
Sep 30 12:56:44 crc kubenswrapper[5002]: I0930 12:56:44.676945 5002 scope.go:117] "RemoveContainer" containerID="c399d44e9bfc45838bcb7d385b7879008ca1a51ef33744b6ce2235833eb197e0"
Sep 30 12:56:44 crc kubenswrapper[5002]: E0930 12:56:44.677865 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-ncbb5_openshift-machine-config-operator(341a55c6-78d3-4fa2-8f47-b56fd41fa1c1)\"" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" podUID="341a55c6-78d3-4fa2-8f47-b56fd41fa1c1"
Sep 30 12:56:58 crc kubenswrapper[5002]: I0930 12:56:58.676623 5002 scope.go:117] "RemoveContainer" containerID="c399d44e9bfc45838bcb7d385b7879008ca1a51ef33744b6ce2235833eb197e0"
Sep 30 12:56:58 crc kubenswrapper[5002]: E0930 12:56:58.677440 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-ncbb5_openshift-machine-config-operator(341a55c6-78d3-4fa2-8f47-b56fd41fa1c1)\"" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" podUID="341a55c6-78d3-4fa2-8f47-b56fd41fa1c1"
Sep 30 12:57:09 crc kubenswrapper[5002]: I0930 12:57:09.676211 5002 scope.go:117] "RemoveContainer" containerID="c399d44e9bfc45838bcb7d385b7879008ca1a51ef33744b6ce2235833eb197e0"
Sep 30 12:57:09 crc kubenswrapper[5002]: E0930 12:57:09.678120 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-ncbb5_openshift-machine-config-operator(341a55c6-78d3-4fa2-8f47-b56fd41fa1c1)\"" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" podUID="341a55c6-78d3-4fa2-8f47-b56fd41fa1c1"
Sep 30 12:57:22 crc kubenswrapper[5002]: I0930 12:57:22.676601 5002 scope.go:117] "RemoveContainer" containerID="c399d44e9bfc45838bcb7d385b7879008ca1a51ef33744b6ce2235833eb197e0"
Sep 30 12:57:22 crc kubenswrapper[5002]: E0930 12:57:22.677411 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-ncbb5_openshift-machine-config-operator(341a55c6-78d3-4fa2-8f47-b56fd41fa1c1)\"" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" podUID="341a55c6-78d3-4fa2-8f47-b56fd41fa1c1"
Sep 30 12:57:37 crc kubenswrapper[5002]: I0930 12:57:37.676457 5002 scope.go:117] "RemoveContainer" containerID="c399d44e9bfc45838bcb7d385b7879008ca1a51ef33744b6ce2235833eb197e0"
Sep 30 12:57:37 crc kubenswrapper[5002]: E0930 12:57:37.677181 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-ncbb5_openshift-machine-config-operator(341a55c6-78d3-4fa2-8f47-b56fd41fa1c1)\"" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" podUID="341a55c6-78d3-4fa2-8f47-b56fd41fa1c1"
Sep 30 12:57:45 crc kubenswrapper[5002]: I0930 12:57:45.979347 5002 generic.go:334] "Generic (PLEG): container finished" podID="7a8b7e27-6872-47a1-b564-9a288ac7cef0" containerID="adf2f5831d8bbde553ae2303e2087a2c3759e3f7bd16e37e526b2fd6e9bf1cd2" exitCode=0
Sep 30 12:57:45 crc kubenswrapper[5002]: I0930 12:57:45.979425 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-lzd9v" event={"ID":"7a8b7e27-6872-47a1-b564-9a288ac7cef0","Type":"ContainerDied","Data":"adf2f5831d8bbde553ae2303e2087a2c3759e3f7bd16e37e526b2fd6e9bf1cd2"}
Sep 30 12:57:47 crc kubenswrapper[5002]: I0930 12:57:47.356687 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-lzd9v"
Sep 30 12:57:47 crc kubenswrapper[5002]: I0930 12:57:47.417855 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/7a8b7e27-6872-47a1-b564-9a288ac7cef0-ssh-key\") pod \"7a8b7e27-6872-47a1-b564-9a288ac7cef0\" (UID: \"7a8b7e27-6872-47a1-b564-9a288ac7cef0\") "
Sep 30 12:57:47 crc kubenswrapper[5002]: I0930 12:57:47.418060 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/7a8b7e27-6872-47a1-b564-9a288ac7cef0-libvirt-secret-0\") pod \"7a8b7e27-6872-47a1-b564-9a288ac7cef0\" (UID: \"7a8b7e27-6872-47a1-b564-9a288ac7cef0\") "
Sep 30 12:57:47 crc kubenswrapper[5002]: I0930 12:57:47.418155 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bq9h2\" (UniqueName: \"kubernetes.io/projected/7a8b7e27-6872-47a1-b564-9a288ac7cef0-kube-api-access-bq9h2\") pod \"7a8b7e27-6872-47a1-b564-9a288ac7cef0\" (UID: \"7a8b7e27-6872-47a1-b564-9a288ac7cef0\") "
Sep 30 12:57:47 crc kubenswrapper[5002]: I0930 12:57:47.418294 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7a8b7e27-6872-47a1-b564-9a288ac7cef0-libvirt-combined-ca-bundle\") pod \"7a8b7e27-6872-47a1-b564-9a288ac7cef0\" (UID: \"7a8b7e27-6872-47a1-b564-9a288ac7cef0\") "
Sep 30 12:57:47 crc kubenswrapper[5002]: I0930 12:57:47.418333 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/7a8b7e27-6872-47a1-b564-9a288ac7cef0-inventory\") pod \"7a8b7e27-6872-47a1-b564-9a288ac7cef0\" (UID: \"7a8b7e27-6872-47a1-b564-9a288ac7cef0\") "
Sep 30 12:57:47 crc kubenswrapper[5002]: I0930 12:57:47.428072 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7a8b7e27-6872-47a1-b564-9a288ac7cef0-kube-api-access-bq9h2" (OuterVolumeSpecName: "kube-api-access-bq9h2") pod "7a8b7e27-6872-47a1-b564-9a288ac7cef0" (UID: "7a8b7e27-6872-47a1-b564-9a288ac7cef0"). InnerVolumeSpecName "kube-api-access-bq9h2". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 12:57:47 crc kubenswrapper[5002]: I0930 12:57:47.434562 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7a8b7e27-6872-47a1-b564-9a288ac7cef0-libvirt-combined-ca-bundle" (OuterVolumeSpecName: "libvirt-combined-ca-bundle") pod "7a8b7e27-6872-47a1-b564-9a288ac7cef0" (UID: "7a8b7e27-6872-47a1-b564-9a288ac7cef0"). InnerVolumeSpecName "libvirt-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 12:57:47 crc kubenswrapper[5002]: I0930 12:57:47.455692 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7a8b7e27-6872-47a1-b564-9a288ac7cef0-libvirt-secret-0" (OuterVolumeSpecName: "libvirt-secret-0") pod "7a8b7e27-6872-47a1-b564-9a288ac7cef0" (UID: "7a8b7e27-6872-47a1-b564-9a288ac7cef0"). InnerVolumeSpecName "libvirt-secret-0". PluginName "kubernetes.io/secret", VolumeGidValue ""
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:57:47 crc kubenswrapper[5002]: I0930 12:57:47.457266 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7a8b7e27-6872-47a1-b564-9a288ac7cef0-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "7a8b7e27-6872-47a1-b564-9a288ac7cef0" (UID: "7a8b7e27-6872-47a1-b564-9a288ac7cef0"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:57:47 crc kubenswrapper[5002]: I0930 12:57:47.465587 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7a8b7e27-6872-47a1-b564-9a288ac7cef0-inventory" (OuterVolumeSpecName: "inventory") pod "7a8b7e27-6872-47a1-b564-9a288ac7cef0" (UID: "7a8b7e27-6872-47a1-b564-9a288ac7cef0"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 12:57:47 crc kubenswrapper[5002]: I0930 12:57:47.524351 5002 reconciler_common.go:293] "Volume detached for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7a8b7e27-6872-47a1-b564-9a288ac7cef0-libvirt-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 12:57:47 crc kubenswrapper[5002]: I0930 12:57:47.524389 5002 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/7a8b7e27-6872-47a1-b564-9a288ac7cef0-inventory\") on node \"crc\" DevicePath \"\"" Sep 30 12:57:47 crc kubenswrapper[5002]: I0930 12:57:47.524399 5002 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/7a8b7e27-6872-47a1-b564-9a288ac7cef0-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 30 12:57:47 crc kubenswrapper[5002]: I0930 12:57:47.524409 5002 reconciler_common.go:293] "Volume detached for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/7a8b7e27-6872-47a1-b564-9a288ac7cef0-libvirt-secret-0\") on node \"crc\" DevicePath \"\"" Sep 30 12:57:47 crc kubenswrapper[5002]: I0930 12:57:47.524419 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bq9h2\" (UniqueName: \"kubernetes.io/projected/7a8b7e27-6872-47a1-b564-9a288ac7cef0-kube-api-access-bq9h2\") on node \"crc\" DevicePath \"\"" Sep 30 12:57:47 crc kubenswrapper[5002]: I0930 12:57:47.999368 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-lzd9v" event={"ID":"7a8b7e27-6872-47a1-b564-9a288ac7cef0","Type":"ContainerDied","Data":"5e61b9701b62113abdf5e5a9274e4be88181a90bac722a6782b9eba330071be9"} Sep 30 12:57:47 crc kubenswrapper[5002]: I0930 12:57:47.999420 5002 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5e61b9701b62113abdf5e5a9274e4be88181a90bac722a6782b9eba330071be9" Sep 30 12:57:47 crc kubenswrapper[5002]: I0930 12:57:47.999521 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-lzd9v" Sep 30 12:57:48 crc kubenswrapper[5002]: I0930 12:57:48.096337 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-edpm-deployment-openstack-edpm-ipam-txpxh"] Sep 30 12:57:48 crc kubenswrapper[5002]: E0930 12:57:48.096855 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3ef31068-5bb0-498d-8d1e-6ff6c7ca2541" containerName="extract-utilities" Sep 30 12:57:48 crc kubenswrapper[5002]: I0930 12:57:48.096878 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="3ef31068-5bb0-498d-8d1e-6ff6c7ca2541" containerName="extract-utilities" Sep 30 12:57:48 crc kubenswrapper[5002]: E0930 12:57:48.096900 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7a8b7e27-6872-47a1-b564-9a288ac7cef0" containerName="libvirt-edpm-deployment-openstack-edpm-ipam" Sep 30 12:57:48 crc kubenswrapper[5002]: I0930 12:57:48.096910 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="7a8b7e27-6872-47a1-b564-9a288ac7cef0" containerName="libvirt-edpm-deployment-openstack-edpm-ipam" Sep 30 12:57:48 crc kubenswrapper[5002]: E0930 12:57:48.096932 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="add63184-f23e-409b-9a5c-3bd04411a22d" containerName="registry-server" Sep 30 12:57:48 crc kubenswrapper[5002]: I0930 12:57:48.096940 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="add63184-f23e-409b-9a5c-3bd04411a22d" containerName="registry-server" Sep 30 12:57:48 crc kubenswrapper[5002]: E0930 12:57:48.096954 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="add63184-f23e-409b-9a5c-3bd04411a22d" containerName="extract-content" Sep 30 12:57:48 crc kubenswrapper[5002]: I0930 12:57:48.096961 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="add63184-f23e-409b-9a5c-3bd04411a22d" containerName="extract-content" Sep 30 12:57:48 crc kubenswrapper[5002]: E0930 12:57:48.096981 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3ef31068-5bb0-498d-8d1e-6ff6c7ca2541" containerName="registry-server" Sep 30 12:57:48 crc kubenswrapper[5002]: I0930 12:57:48.096989 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="3ef31068-5bb0-498d-8d1e-6ff6c7ca2541" containerName="registry-server" Sep 30 12:57:48 crc kubenswrapper[5002]: E0930 12:57:48.097000 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="319e27ce-2241-42de-90a6-e551d89bd632" containerName="registry-server" Sep 30 12:57:48 crc kubenswrapper[5002]: I0930 12:57:48.097009 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="319e27ce-2241-42de-90a6-e551d89bd632" containerName="registry-server" Sep 30 12:57:48 crc kubenswrapper[5002]: E0930 12:57:48.097032 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="319e27ce-2241-42de-90a6-e551d89bd632" containerName="extract-utilities" Sep 30 12:57:48 crc kubenswrapper[5002]: I0930 12:57:48.097040 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="319e27ce-2241-42de-90a6-e551d89bd632" containerName="extract-utilities" Sep 30 12:57:48 crc kubenswrapper[5002]: E0930 12:57:48.097055 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="319e27ce-2241-42de-90a6-e551d89bd632" containerName="extract-content" Sep 30 12:57:48 crc kubenswrapper[5002]: I0930 12:57:48.097063 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="319e27ce-2241-42de-90a6-e551d89bd632" containerName="extract-content" Sep 30 12:57:48 crc kubenswrapper[5002]: 
E0930 12:57:48.097071 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3ef31068-5bb0-498d-8d1e-6ff6c7ca2541" containerName="extract-content" Sep 30 12:57:48 crc kubenswrapper[5002]: I0930 12:57:48.097078 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="3ef31068-5bb0-498d-8d1e-6ff6c7ca2541" containerName="extract-content" Sep 30 12:57:48 crc kubenswrapper[5002]: E0930 12:57:48.097088 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="add63184-f23e-409b-9a5c-3bd04411a22d" containerName="extract-utilities" Sep 30 12:57:48 crc kubenswrapper[5002]: I0930 12:57:48.097097 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="add63184-f23e-409b-9a5c-3bd04411a22d" containerName="extract-utilities" Sep 30 12:57:48 crc kubenswrapper[5002]: I0930 12:57:48.097317 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="319e27ce-2241-42de-90a6-e551d89bd632" containerName="registry-server" Sep 30 12:57:48 crc kubenswrapper[5002]: I0930 12:57:48.097347 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="add63184-f23e-409b-9a5c-3bd04411a22d" containerName="registry-server" Sep 30 12:57:48 crc kubenswrapper[5002]: I0930 12:57:48.097360 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="3ef31068-5bb0-498d-8d1e-6ff6c7ca2541" containerName="registry-server" Sep 30 12:57:48 crc kubenswrapper[5002]: I0930 12:57:48.097382 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="7a8b7e27-6872-47a1-b564-9a288ac7cef0" containerName="libvirt-edpm-deployment-openstack-edpm-ipam" Sep 30 12:57:48 crc kubenswrapper[5002]: I0930 12:57:48.098245 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-txpxh" Sep 30 12:57:48 crc kubenswrapper[5002]: I0930 12:57:48.100921 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-ddhrk" Sep 30 12:57:48 crc kubenswrapper[5002]: I0930 12:57:48.101000 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-compute-config" Sep 30 12:57:48 crc kubenswrapper[5002]: I0930 12:57:48.101097 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Sep 30 12:57:48 crc kubenswrapper[5002]: I0930 12:57:48.101119 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 30 12:57:48 crc kubenswrapper[5002]: I0930 12:57:48.101363 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Sep 30 12:57:48 crc kubenswrapper[5002]: I0930 12:57:48.101568 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-migration-ssh-key" Sep 30 12:57:48 crc kubenswrapper[5002]: I0930 12:57:48.107201 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"nova-extra-config" Sep 30 12:57:48 crc kubenswrapper[5002]: I0930 12:57:48.117595 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-edpm-deployment-openstack-edpm-ipam-txpxh"] Sep 30 12:57:48 crc kubenswrapper[5002]: I0930 12:57:48.235782 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d81299d2-2b37-4c3f-b313-d02d7b33045c-inventory\") pod \"nova-edpm-deployment-openstack-edpm-ipam-txpxh\" (UID: 
\"d81299d2-2b37-4c3f-b313-d02d7b33045c\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-txpxh" Sep 30 12:57:48 crc kubenswrapper[5002]: I0930 12:57:48.235827 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/d81299d2-2b37-4c3f-b313-d02d7b33045c-nova-migration-ssh-key-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-txpxh\" (UID: \"d81299d2-2b37-4c3f-b313-d02d7b33045c\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-txpxh" Sep 30 12:57:48 crc kubenswrapper[5002]: I0930 12:57:48.235876 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/d81299d2-2b37-4c3f-b313-d02d7b33045c-nova-cell1-compute-config-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-txpxh\" (UID: \"d81299d2-2b37-4c3f-b313-d02d7b33045c\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-txpxh" Sep 30 12:57:48 crc kubenswrapper[5002]: I0930 12:57:48.235906 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wcj26\" (UniqueName: \"kubernetes.io/projected/d81299d2-2b37-4c3f-b313-d02d7b33045c-kube-api-access-wcj26\") pod \"nova-edpm-deployment-openstack-edpm-ipam-txpxh\" (UID: \"d81299d2-2b37-4c3f-b313-d02d7b33045c\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-txpxh" Sep 30 12:57:48 crc kubenswrapper[5002]: I0930 12:57:48.235938 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d81299d2-2b37-4c3f-b313-d02d7b33045c-nova-combined-ca-bundle\") pod \"nova-edpm-deployment-openstack-edpm-ipam-txpxh\" (UID: \"d81299d2-2b37-4c3f-b313-d02d7b33045c\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-txpxh" Sep 30 12:57:48 crc kubenswrapper[5002]: I0930 12:57:48.235955 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d81299d2-2b37-4c3f-b313-d02d7b33045c-ssh-key\") pod \"nova-edpm-deployment-openstack-edpm-ipam-txpxh\" (UID: \"d81299d2-2b37-4c3f-b313-d02d7b33045c\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-txpxh" Sep 30 12:57:48 crc kubenswrapper[5002]: I0930 12:57:48.235972 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/d81299d2-2b37-4c3f-b313-d02d7b33045c-nova-migration-ssh-key-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-txpxh\" (UID: \"d81299d2-2b37-4c3f-b313-d02d7b33045c\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-txpxh" Sep 30 12:57:48 crc kubenswrapper[5002]: I0930 12:57:48.235991 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/d81299d2-2b37-4c3f-b313-d02d7b33045c-nova-cell1-compute-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-txpxh\" (UID: \"d81299d2-2b37-4c3f-b313-d02d7b33045c\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-txpxh" Sep 30 12:57:48 crc kubenswrapper[5002]: I0930 12:57:48.236013 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-extra-config-0\" (UniqueName: 
\"kubernetes.io/configmap/d81299d2-2b37-4c3f-b313-d02d7b33045c-nova-extra-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-txpxh\" (UID: \"d81299d2-2b37-4c3f-b313-d02d7b33045c\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-txpxh" Sep 30 12:57:48 crc kubenswrapper[5002]: I0930 12:57:48.337556 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/d81299d2-2b37-4c3f-b313-d02d7b33045c-nova-cell1-compute-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-txpxh\" (UID: \"d81299d2-2b37-4c3f-b313-d02d7b33045c\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-txpxh" Sep 30 12:57:48 crc kubenswrapper[5002]: I0930 12:57:48.337599 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/d81299d2-2b37-4c3f-b313-d02d7b33045c-nova-extra-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-txpxh\" (UID: \"d81299d2-2b37-4c3f-b313-d02d7b33045c\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-txpxh" Sep 30 12:57:48 crc kubenswrapper[5002]: I0930 12:57:48.337693 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d81299d2-2b37-4c3f-b313-d02d7b33045c-inventory\") pod \"nova-edpm-deployment-openstack-edpm-ipam-txpxh\" (UID: \"d81299d2-2b37-4c3f-b313-d02d7b33045c\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-txpxh" Sep 30 12:57:48 crc kubenswrapper[5002]: I0930 12:57:48.337720 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/d81299d2-2b37-4c3f-b313-d02d7b33045c-nova-migration-ssh-key-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-txpxh\" (UID: \"d81299d2-2b37-4c3f-b313-d02d7b33045c\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-txpxh" Sep 30 12:57:48 crc kubenswrapper[5002]: I0930 12:57:48.337766 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/d81299d2-2b37-4c3f-b313-d02d7b33045c-nova-cell1-compute-config-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-txpxh\" (UID: \"d81299d2-2b37-4c3f-b313-d02d7b33045c\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-txpxh" Sep 30 12:57:48 crc kubenswrapper[5002]: I0930 12:57:48.337794 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wcj26\" (UniqueName: \"kubernetes.io/projected/d81299d2-2b37-4c3f-b313-d02d7b33045c-kube-api-access-wcj26\") pod \"nova-edpm-deployment-openstack-edpm-ipam-txpxh\" (UID: \"d81299d2-2b37-4c3f-b313-d02d7b33045c\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-txpxh" Sep 30 12:57:48 crc kubenswrapper[5002]: I0930 12:57:48.337827 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d81299d2-2b37-4c3f-b313-d02d7b33045c-nova-combined-ca-bundle\") pod \"nova-edpm-deployment-openstack-edpm-ipam-txpxh\" (UID: \"d81299d2-2b37-4c3f-b313-d02d7b33045c\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-txpxh" Sep 30 12:57:48 crc kubenswrapper[5002]: I0930 12:57:48.337843 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: 
\"kubernetes.io/secret/d81299d2-2b37-4c3f-b313-d02d7b33045c-ssh-key\") pod \"nova-edpm-deployment-openstack-edpm-ipam-txpxh\" (UID: \"d81299d2-2b37-4c3f-b313-d02d7b33045c\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-txpxh" Sep 30 12:57:48 crc kubenswrapper[5002]: I0930 12:57:48.337861 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/d81299d2-2b37-4c3f-b313-d02d7b33045c-nova-migration-ssh-key-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-txpxh\" (UID: \"d81299d2-2b37-4c3f-b313-d02d7b33045c\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-txpxh" Sep 30 12:57:48 crc kubenswrapper[5002]: I0930 12:57:48.339277 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/d81299d2-2b37-4c3f-b313-d02d7b33045c-nova-extra-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-txpxh\" (UID: \"d81299d2-2b37-4c3f-b313-d02d7b33045c\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-txpxh" Sep 30 12:57:48 crc kubenswrapper[5002]: I0930 12:57:48.343829 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d81299d2-2b37-4c3f-b313-d02d7b33045c-inventory\") pod \"nova-edpm-deployment-openstack-edpm-ipam-txpxh\" (UID: \"d81299d2-2b37-4c3f-b313-d02d7b33045c\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-txpxh" Sep 30 12:57:48 crc kubenswrapper[5002]: I0930 12:57:48.343930 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/d81299d2-2b37-4c3f-b313-d02d7b33045c-nova-migration-ssh-key-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-txpxh\" (UID: \"d81299d2-2b37-4c3f-b313-d02d7b33045c\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-txpxh" Sep 30 12:57:48 crc kubenswrapper[5002]: I0930 12:57:48.344046 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d81299d2-2b37-4c3f-b313-d02d7b33045c-ssh-key\") pod \"nova-edpm-deployment-openstack-edpm-ipam-txpxh\" (UID: \"d81299d2-2b37-4c3f-b313-d02d7b33045c\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-txpxh" Sep 30 12:57:48 crc kubenswrapper[5002]: I0930 12:57:48.344274 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/d81299d2-2b37-4c3f-b313-d02d7b33045c-nova-migration-ssh-key-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-txpxh\" (UID: \"d81299d2-2b37-4c3f-b313-d02d7b33045c\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-txpxh" Sep 30 12:57:48 crc kubenswrapper[5002]: I0930 12:57:48.344815 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/d81299d2-2b37-4c3f-b313-d02d7b33045c-nova-cell1-compute-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-txpxh\" (UID: \"d81299d2-2b37-4c3f-b313-d02d7b33045c\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-txpxh" Sep 30 12:57:48 crc kubenswrapper[5002]: I0930 12:57:48.344964 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/d81299d2-2b37-4c3f-b313-d02d7b33045c-nova-cell1-compute-config-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-txpxh\" (UID: 
\"d81299d2-2b37-4c3f-b313-d02d7b33045c\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-txpxh" Sep 30 12:57:48 crc kubenswrapper[5002]: I0930 12:57:48.346301 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d81299d2-2b37-4c3f-b313-d02d7b33045c-nova-combined-ca-bundle\") pod \"nova-edpm-deployment-openstack-edpm-ipam-txpxh\" (UID: \"d81299d2-2b37-4c3f-b313-d02d7b33045c\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-txpxh" Sep 30 12:57:48 crc kubenswrapper[5002]: I0930 12:57:48.361968 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wcj26\" (UniqueName: \"kubernetes.io/projected/d81299d2-2b37-4c3f-b313-d02d7b33045c-kube-api-access-wcj26\") pod \"nova-edpm-deployment-openstack-edpm-ipam-txpxh\" (UID: \"d81299d2-2b37-4c3f-b313-d02d7b33045c\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-txpxh" Sep 30 12:57:48 crc kubenswrapper[5002]: I0930 12:57:48.417909 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-txpxh" Sep 30 12:57:48 crc kubenswrapper[5002]: I0930 12:57:48.943087 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-edpm-deployment-openstack-edpm-ipam-txpxh"] Sep 30 12:57:48 crc kubenswrapper[5002]: I0930 12:57:48.963424 5002 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Sep 30 12:57:49 crc kubenswrapper[5002]: I0930 12:57:49.009209 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-txpxh" event={"ID":"d81299d2-2b37-4c3f-b313-d02d7b33045c","Type":"ContainerStarted","Data":"31ab3f9961b6647465ae2e34a377bc4e7904f62afc49b13aab61fc35318cd8f7"} Sep 30 12:57:50 crc kubenswrapper[5002]: I0930 12:57:50.019606 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-txpxh" event={"ID":"d81299d2-2b37-4c3f-b313-d02d7b33045c","Type":"ContainerStarted","Data":"8095a0d20ceb1ed56a1814f16c0f64097156ff903d8e75a0b98729a526426319"} Sep 30 12:57:50 crc kubenswrapper[5002]: I0930 12:57:50.039501 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-txpxh" podStartSLOduration=1.531345097 podStartE2EDuration="2.039462592s" podCreationTimestamp="2025-09-30 12:57:48 +0000 UTC" firstStartedPulling="2025-09-30 12:57:48.963154072 +0000 UTC m=+2243.212836218" lastFinishedPulling="2025-09-30 12:57:49.471271567 +0000 UTC m=+2243.720953713" observedRunningTime="2025-09-30 12:57:50.039215835 +0000 UTC m=+2244.288897981" watchObservedRunningTime="2025-09-30 12:57:50.039462592 +0000 UTC m=+2244.289144738" Sep 30 12:57:52 crc kubenswrapper[5002]: I0930 12:57:52.681391 5002 scope.go:117] "RemoveContainer" containerID="c399d44e9bfc45838bcb7d385b7879008ca1a51ef33744b6ce2235833eb197e0" Sep 30 12:57:52 crc kubenswrapper[5002]: E0930 12:57:52.682601 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-ncbb5_openshift-machine-config-operator(341a55c6-78d3-4fa2-8f47-b56fd41fa1c1)\"" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" podUID="341a55c6-78d3-4fa2-8f47-b56fd41fa1c1" Sep 30 12:58:03 crc kubenswrapper[5002]: I0930 
12:58:03.676732 5002 scope.go:117] "RemoveContainer" containerID="c399d44e9bfc45838bcb7d385b7879008ca1a51ef33744b6ce2235833eb197e0" Sep 30 12:58:03 crc kubenswrapper[5002]: E0930 12:58:03.677793 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-ncbb5_openshift-machine-config-operator(341a55c6-78d3-4fa2-8f47-b56fd41fa1c1)\"" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" podUID="341a55c6-78d3-4fa2-8f47-b56fd41fa1c1" Sep 30 12:58:16 crc kubenswrapper[5002]: I0930 12:58:16.683984 5002 scope.go:117] "RemoveContainer" containerID="c399d44e9bfc45838bcb7d385b7879008ca1a51ef33744b6ce2235833eb197e0" Sep 30 12:58:16 crc kubenswrapper[5002]: E0930 12:58:16.684911 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-ncbb5_openshift-machine-config-operator(341a55c6-78d3-4fa2-8f47-b56fd41fa1c1)\"" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" podUID="341a55c6-78d3-4fa2-8f47-b56fd41fa1c1" Sep 30 12:58:31 crc kubenswrapper[5002]: I0930 12:58:31.676564 5002 scope.go:117] "RemoveContainer" containerID="c399d44e9bfc45838bcb7d385b7879008ca1a51ef33744b6ce2235833eb197e0" Sep 30 12:58:31 crc kubenswrapper[5002]: E0930 12:58:31.677369 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-ncbb5_openshift-machine-config-operator(341a55c6-78d3-4fa2-8f47-b56fd41fa1c1)\"" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" podUID="341a55c6-78d3-4fa2-8f47-b56fd41fa1c1" Sep 30 12:58:43 crc kubenswrapper[5002]: I0930 12:58:43.675859 5002 scope.go:117] "RemoveContainer" containerID="c399d44e9bfc45838bcb7d385b7879008ca1a51ef33744b6ce2235833eb197e0" Sep 30 12:58:43 crc kubenswrapper[5002]: E0930 12:58:43.676801 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-ncbb5_openshift-machine-config-operator(341a55c6-78d3-4fa2-8f47-b56fd41fa1c1)\"" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" podUID="341a55c6-78d3-4fa2-8f47-b56fd41fa1c1" Sep 30 12:58:56 crc kubenswrapper[5002]: I0930 12:58:56.683219 5002 scope.go:117] "RemoveContainer" containerID="c399d44e9bfc45838bcb7d385b7879008ca1a51ef33744b6ce2235833eb197e0" Sep 30 12:58:56 crc kubenswrapper[5002]: E0930 12:58:56.683964 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-ncbb5_openshift-machine-config-operator(341a55c6-78d3-4fa2-8f47-b56fd41fa1c1)\"" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" podUID="341a55c6-78d3-4fa2-8f47-b56fd41fa1c1" Sep 30 12:59:07 crc kubenswrapper[5002]: I0930 12:59:07.677221 5002 scope.go:117] "RemoveContainer" containerID="c399d44e9bfc45838bcb7d385b7879008ca1a51ef33744b6ce2235833eb197e0" Sep 30 12:59:07 crc kubenswrapper[5002]: E0930 12:59:07.678784 
5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-ncbb5_openshift-machine-config-operator(341a55c6-78d3-4fa2-8f47-b56fd41fa1c1)\"" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" podUID="341a55c6-78d3-4fa2-8f47-b56fd41fa1c1" Sep 30 12:59:18 crc kubenswrapper[5002]: I0930 12:59:18.676673 5002 scope.go:117] "RemoveContainer" containerID="c399d44e9bfc45838bcb7d385b7879008ca1a51ef33744b6ce2235833eb197e0" Sep 30 12:59:18 crc kubenswrapper[5002]: E0930 12:59:18.677456 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-ncbb5_openshift-machine-config-operator(341a55c6-78d3-4fa2-8f47-b56fd41fa1c1)\"" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" podUID="341a55c6-78d3-4fa2-8f47-b56fd41fa1c1" Sep 30 12:59:32 crc kubenswrapper[5002]: I0930 12:59:32.676257 5002 scope.go:117] "RemoveContainer" containerID="c399d44e9bfc45838bcb7d385b7879008ca1a51ef33744b6ce2235833eb197e0" Sep 30 12:59:32 crc kubenswrapper[5002]: E0930 12:59:32.676895 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-ncbb5_openshift-machine-config-operator(341a55c6-78d3-4fa2-8f47-b56fd41fa1c1)\"" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" podUID="341a55c6-78d3-4fa2-8f47-b56fd41fa1c1" Sep 30 12:59:43 crc kubenswrapper[5002]: I0930 12:59:43.675662 5002 scope.go:117] "RemoveContainer" containerID="c399d44e9bfc45838bcb7d385b7879008ca1a51ef33744b6ce2235833eb197e0" Sep 30 12:59:43 crc kubenswrapper[5002]: E0930 12:59:43.676600 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-ncbb5_openshift-machine-config-operator(341a55c6-78d3-4fa2-8f47-b56fd41fa1c1)\"" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" podUID="341a55c6-78d3-4fa2-8f47-b56fd41fa1c1" Sep 30 12:59:54 crc kubenswrapper[5002]: I0930 12:59:54.676346 5002 scope.go:117] "RemoveContainer" containerID="c399d44e9bfc45838bcb7d385b7879008ca1a51ef33744b6ce2235833eb197e0" Sep 30 12:59:54 crc kubenswrapper[5002]: E0930 12:59:54.677405 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-ncbb5_openshift-machine-config-operator(341a55c6-78d3-4fa2-8f47-b56fd41fa1c1)\"" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" podUID="341a55c6-78d3-4fa2-8f47-b56fd41fa1c1" Sep 30 13:00:00 crc kubenswrapper[5002]: I0930 13:00:00.144169 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29320620-65frg"] Sep 30 13:00:00 crc kubenswrapper[5002]: I0930 13:00:00.145912 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29320620-65frg" Sep 30 13:00:00 crc kubenswrapper[5002]: I0930 13:00:00.148897 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Sep 30 13:00:00 crc kubenswrapper[5002]: I0930 13:00:00.149052 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Sep 30 13:00:00 crc kubenswrapper[5002]: I0930 13:00:00.155440 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29320620-65frg"] Sep 30 13:00:00 crc kubenswrapper[5002]: I0930 13:00:00.183194 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/79b6ba14-904d-4c08-8416-c8805f76e72e-config-volume\") pod \"collect-profiles-29320620-65frg\" (UID: \"79b6ba14-904d-4c08-8416-c8805f76e72e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29320620-65frg" Sep 30 13:00:00 crc kubenswrapper[5002]: I0930 13:00:00.183346 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9x8zr\" (UniqueName: \"kubernetes.io/projected/79b6ba14-904d-4c08-8416-c8805f76e72e-kube-api-access-9x8zr\") pod \"collect-profiles-29320620-65frg\" (UID: \"79b6ba14-904d-4c08-8416-c8805f76e72e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29320620-65frg" Sep 30 13:00:00 crc kubenswrapper[5002]: I0930 13:00:00.183910 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/79b6ba14-904d-4c08-8416-c8805f76e72e-secret-volume\") pod \"collect-profiles-29320620-65frg\" (UID: \"79b6ba14-904d-4c08-8416-c8805f76e72e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29320620-65frg" Sep 30 13:00:00 crc kubenswrapper[5002]: I0930 13:00:00.285628 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/79b6ba14-904d-4c08-8416-c8805f76e72e-secret-volume\") pod \"collect-profiles-29320620-65frg\" (UID: \"79b6ba14-904d-4c08-8416-c8805f76e72e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29320620-65frg" Sep 30 13:00:00 crc kubenswrapper[5002]: I0930 13:00:00.285702 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/79b6ba14-904d-4c08-8416-c8805f76e72e-config-volume\") pod \"collect-profiles-29320620-65frg\" (UID: \"79b6ba14-904d-4c08-8416-c8805f76e72e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29320620-65frg" Sep 30 13:00:00 crc kubenswrapper[5002]: I0930 13:00:00.285761 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9x8zr\" (UniqueName: \"kubernetes.io/projected/79b6ba14-904d-4c08-8416-c8805f76e72e-kube-api-access-9x8zr\") pod \"collect-profiles-29320620-65frg\" (UID: \"79b6ba14-904d-4c08-8416-c8805f76e72e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29320620-65frg" Sep 30 13:00:00 crc kubenswrapper[5002]: I0930 13:00:00.287259 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/79b6ba14-904d-4c08-8416-c8805f76e72e-config-volume\") pod 
\"collect-profiles-29320620-65frg\" (UID: \"79b6ba14-904d-4c08-8416-c8805f76e72e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29320620-65frg" Sep 30 13:00:00 crc kubenswrapper[5002]: I0930 13:00:00.294027 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/79b6ba14-904d-4c08-8416-c8805f76e72e-secret-volume\") pod \"collect-profiles-29320620-65frg\" (UID: \"79b6ba14-904d-4c08-8416-c8805f76e72e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29320620-65frg" Sep 30 13:00:00 crc kubenswrapper[5002]: I0930 13:00:00.304347 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9x8zr\" (UniqueName: \"kubernetes.io/projected/79b6ba14-904d-4c08-8416-c8805f76e72e-kube-api-access-9x8zr\") pod \"collect-profiles-29320620-65frg\" (UID: \"79b6ba14-904d-4c08-8416-c8805f76e72e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29320620-65frg" Sep 30 13:00:00 crc kubenswrapper[5002]: I0930 13:00:00.463236 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29320620-65frg" Sep 30 13:00:00 crc kubenswrapper[5002]: I0930 13:00:00.927636 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29320620-65frg"] Sep 30 13:00:01 crc kubenswrapper[5002]: I0930 13:00:01.207632 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29320620-65frg" event={"ID":"79b6ba14-904d-4c08-8416-c8805f76e72e","Type":"ContainerStarted","Data":"4d0370f90232a467d92883c28822973cdd468533154d9be28e9c5f1be8e4874f"} Sep 30 13:00:01 crc kubenswrapper[5002]: I0930 13:00:01.207990 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29320620-65frg" event={"ID":"79b6ba14-904d-4c08-8416-c8805f76e72e","Type":"ContainerStarted","Data":"6af961b657147d2ad23a8f1b54ad7a2d02ae0f7b7e96d80704104d92604f69d2"} Sep 30 13:00:02 crc kubenswrapper[5002]: I0930 13:00:02.218382 5002 generic.go:334] "Generic (PLEG): container finished" podID="79b6ba14-904d-4c08-8416-c8805f76e72e" containerID="4d0370f90232a467d92883c28822973cdd468533154d9be28e9c5f1be8e4874f" exitCode=0 Sep 30 13:00:02 crc kubenswrapper[5002]: I0930 13:00:02.218496 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29320620-65frg" event={"ID":"79b6ba14-904d-4c08-8416-c8805f76e72e","Type":"ContainerDied","Data":"4d0370f90232a467d92883c28822973cdd468533154d9be28e9c5f1be8e4874f"} Sep 30 13:00:03 crc kubenswrapper[5002]: I0930 13:00:03.567556 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29320620-65frg" Sep 30 13:00:03 crc kubenswrapper[5002]: I0930 13:00:03.650099 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/79b6ba14-904d-4c08-8416-c8805f76e72e-secret-volume\") pod \"79b6ba14-904d-4c08-8416-c8805f76e72e\" (UID: \"79b6ba14-904d-4c08-8416-c8805f76e72e\") " Sep 30 13:00:03 crc kubenswrapper[5002]: I0930 13:00:03.650241 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/79b6ba14-904d-4c08-8416-c8805f76e72e-config-volume\") pod \"79b6ba14-904d-4c08-8416-c8805f76e72e\" (UID: \"79b6ba14-904d-4c08-8416-c8805f76e72e\") " Sep 30 13:00:03 crc kubenswrapper[5002]: I0930 13:00:03.650347 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9x8zr\" (UniqueName: \"kubernetes.io/projected/79b6ba14-904d-4c08-8416-c8805f76e72e-kube-api-access-9x8zr\") pod \"79b6ba14-904d-4c08-8416-c8805f76e72e\" (UID: \"79b6ba14-904d-4c08-8416-c8805f76e72e\") " Sep 30 13:00:03 crc kubenswrapper[5002]: I0930 13:00:03.650934 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/79b6ba14-904d-4c08-8416-c8805f76e72e-config-volume" (OuterVolumeSpecName: "config-volume") pod "79b6ba14-904d-4c08-8416-c8805f76e72e" (UID: "79b6ba14-904d-4c08-8416-c8805f76e72e"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 13:00:03 crc kubenswrapper[5002]: I0930 13:00:03.651222 5002 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/79b6ba14-904d-4c08-8416-c8805f76e72e-config-volume\") on node \"crc\" DevicePath \"\"" Sep 30 13:00:03 crc kubenswrapper[5002]: I0930 13:00:03.657067 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/79b6ba14-904d-4c08-8416-c8805f76e72e-kube-api-access-9x8zr" (OuterVolumeSpecName: "kube-api-access-9x8zr") pod "79b6ba14-904d-4c08-8416-c8805f76e72e" (UID: "79b6ba14-904d-4c08-8416-c8805f76e72e"). InnerVolumeSpecName "kube-api-access-9x8zr". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 13:00:03 crc kubenswrapper[5002]: I0930 13:00:03.657547 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/79b6ba14-904d-4c08-8416-c8805f76e72e-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "79b6ba14-904d-4c08-8416-c8805f76e72e" (UID: "79b6ba14-904d-4c08-8416-c8805f76e72e"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:00:03 crc kubenswrapper[5002]: I0930 13:00:03.752657 5002 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/79b6ba14-904d-4c08-8416-c8805f76e72e-secret-volume\") on node \"crc\" DevicePath \"\"" Sep 30 13:00:03 crc kubenswrapper[5002]: I0930 13:00:03.752696 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9x8zr\" (UniqueName: \"kubernetes.io/projected/79b6ba14-904d-4c08-8416-c8805f76e72e-kube-api-access-9x8zr\") on node \"crc\" DevicePath \"\"" Sep 30 13:00:04 crc kubenswrapper[5002]: I0930 13:00:04.242283 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29320620-65frg" event={"ID":"79b6ba14-904d-4c08-8416-c8805f76e72e","Type":"ContainerDied","Data":"6af961b657147d2ad23a8f1b54ad7a2d02ae0f7b7e96d80704104d92604f69d2"} Sep 30 13:00:04 crc kubenswrapper[5002]: I0930 13:00:04.242341 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29320620-65frg" Sep 30 13:00:04 crc kubenswrapper[5002]: I0930 13:00:04.242375 5002 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6af961b657147d2ad23a8f1b54ad7a2d02ae0f7b7e96d80704104d92604f69d2" Sep 30 13:00:04 crc kubenswrapper[5002]: I0930 13:00:04.294803 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29320575-h29rv"] Sep 30 13:00:04 crc kubenswrapper[5002]: I0930 13:00:04.301565 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29320575-h29rv"] Sep 30 13:00:04 crc kubenswrapper[5002]: I0930 13:00:04.688596 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0335170b-46ee-4cd3-aae5-694623192d49" path="/var/lib/kubelet/pods/0335170b-46ee-4cd3-aae5-694623192d49/volumes" Sep 30 13:00:07 crc kubenswrapper[5002]: I0930 13:00:07.676665 5002 scope.go:117] "RemoveContainer" containerID="c399d44e9bfc45838bcb7d385b7879008ca1a51ef33744b6ce2235833eb197e0" Sep 30 13:00:07 crc kubenswrapper[5002]: E0930 13:00:07.677603 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-ncbb5_openshift-machine-config-operator(341a55c6-78d3-4fa2-8f47-b56fd41fa1c1)\"" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" podUID="341a55c6-78d3-4fa2-8f47-b56fd41fa1c1" Sep 30 13:00:18 crc kubenswrapper[5002]: I0930 13:00:18.676821 5002 scope.go:117] "RemoveContainer" containerID="c399d44e9bfc45838bcb7d385b7879008ca1a51ef33744b6ce2235833eb197e0" Sep 30 13:00:18 crc kubenswrapper[5002]: E0930 13:00:18.678014 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-ncbb5_openshift-machine-config-operator(341a55c6-78d3-4fa2-8f47-b56fd41fa1c1)\"" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" podUID="341a55c6-78d3-4fa2-8f47-b56fd41fa1c1" Sep 30 13:00:30 crc kubenswrapper[5002]: I0930 13:00:30.676451 5002 scope.go:117] "RemoveContainer" containerID="c399d44e9bfc45838bcb7d385b7879008ca1a51ef33744b6ce2235833eb197e0" Sep 30 13:00:30 
crc kubenswrapper[5002]: E0930 13:00:30.677453 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-ncbb5_openshift-machine-config-operator(341a55c6-78d3-4fa2-8f47-b56fd41fa1c1)\"" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" podUID="341a55c6-78d3-4fa2-8f47-b56fd41fa1c1" Sep 30 13:00:43 crc kubenswrapper[5002]: I0930 13:00:43.070975 5002 scope.go:117] "RemoveContainer" containerID="84e129322ffd4f8541b7a55586bc94416e78bbd8fd3fb4407ccf393239fe2adc" Sep 30 13:00:44 crc kubenswrapper[5002]: I0930 13:00:44.676846 5002 scope.go:117] "RemoveContainer" containerID="c399d44e9bfc45838bcb7d385b7879008ca1a51ef33744b6ce2235833eb197e0" Sep 30 13:00:44 crc kubenswrapper[5002]: E0930 13:00:44.677665 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-ncbb5_openshift-machine-config-operator(341a55c6-78d3-4fa2-8f47-b56fd41fa1c1)\"" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" podUID="341a55c6-78d3-4fa2-8f47-b56fd41fa1c1" Sep 30 13:00:57 crc kubenswrapper[5002]: I0930 13:00:57.677075 5002 scope.go:117] "RemoveContainer" containerID="c399d44e9bfc45838bcb7d385b7879008ca1a51ef33744b6ce2235833eb197e0" Sep 30 13:00:57 crc kubenswrapper[5002]: E0930 13:00:57.677892 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-ncbb5_openshift-machine-config-operator(341a55c6-78d3-4fa2-8f47-b56fd41fa1c1)\"" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" podUID="341a55c6-78d3-4fa2-8f47-b56fd41fa1c1" Sep 30 13:01:00 crc kubenswrapper[5002]: I0930 13:01:00.149265 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-cron-29320621-l2g2n"] Sep 30 13:01:00 crc kubenswrapper[5002]: E0930 13:01:00.150320 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="79b6ba14-904d-4c08-8416-c8805f76e72e" containerName="collect-profiles" Sep 30 13:01:00 crc kubenswrapper[5002]: I0930 13:01:00.150340 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="79b6ba14-904d-4c08-8416-c8805f76e72e" containerName="collect-profiles" Sep 30 13:01:00 crc kubenswrapper[5002]: I0930 13:01:00.150587 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="79b6ba14-904d-4c08-8416-c8805f76e72e" containerName="collect-profiles" Sep 30 13:01:00 crc kubenswrapper[5002]: I0930 13:01:00.151430 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-cron-29320621-l2g2n" Sep 30 13:01:00 crc kubenswrapper[5002]: I0930 13:01:00.165815 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cron-29320621-l2g2n"] Sep 30 13:01:00 crc kubenswrapper[5002]: I0930 13:01:00.278239 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/321c3fc2-62d9-46a2-99b3-d1bc4e7e534c-fernet-keys\") pod \"keystone-cron-29320621-l2g2n\" (UID: \"321c3fc2-62d9-46a2-99b3-d1bc4e7e534c\") " pod="openstack/keystone-cron-29320621-l2g2n" Sep 30 13:01:00 crc kubenswrapper[5002]: I0930 13:01:00.278314 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cf667\" (UniqueName: \"kubernetes.io/projected/321c3fc2-62d9-46a2-99b3-d1bc4e7e534c-kube-api-access-cf667\") pod \"keystone-cron-29320621-l2g2n\" (UID: \"321c3fc2-62d9-46a2-99b3-d1bc4e7e534c\") " pod="openstack/keystone-cron-29320621-l2g2n" Sep 30 13:01:00 crc kubenswrapper[5002]: I0930 13:01:00.278401 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/321c3fc2-62d9-46a2-99b3-d1bc4e7e534c-combined-ca-bundle\") pod \"keystone-cron-29320621-l2g2n\" (UID: \"321c3fc2-62d9-46a2-99b3-d1bc4e7e534c\") " pod="openstack/keystone-cron-29320621-l2g2n" Sep 30 13:01:00 crc kubenswrapper[5002]: I0930 13:01:00.278437 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/321c3fc2-62d9-46a2-99b3-d1bc4e7e534c-config-data\") pod \"keystone-cron-29320621-l2g2n\" (UID: \"321c3fc2-62d9-46a2-99b3-d1bc4e7e534c\") " pod="openstack/keystone-cron-29320621-l2g2n" Sep 30 13:01:00 crc kubenswrapper[5002]: I0930 13:01:00.380168 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/321c3fc2-62d9-46a2-99b3-d1bc4e7e534c-fernet-keys\") pod \"keystone-cron-29320621-l2g2n\" (UID: \"321c3fc2-62d9-46a2-99b3-d1bc4e7e534c\") " pod="openstack/keystone-cron-29320621-l2g2n" Sep 30 13:01:00 crc kubenswrapper[5002]: I0930 13:01:00.380237 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cf667\" (UniqueName: \"kubernetes.io/projected/321c3fc2-62d9-46a2-99b3-d1bc4e7e534c-kube-api-access-cf667\") pod \"keystone-cron-29320621-l2g2n\" (UID: \"321c3fc2-62d9-46a2-99b3-d1bc4e7e534c\") " pod="openstack/keystone-cron-29320621-l2g2n" Sep 30 13:01:00 crc kubenswrapper[5002]: I0930 13:01:00.380287 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/321c3fc2-62d9-46a2-99b3-d1bc4e7e534c-combined-ca-bundle\") pod \"keystone-cron-29320621-l2g2n\" (UID: \"321c3fc2-62d9-46a2-99b3-d1bc4e7e534c\") " pod="openstack/keystone-cron-29320621-l2g2n" Sep 30 13:01:00 crc kubenswrapper[5002]: I0930 13:01:00.380306 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/321c3fc2-62d9-46a2-99b3-d1bc4e7e534c-config-data\") pod \"keystone-cron-29320621-l2g2n\" (UID: \"321c3fc2-62d9-46a2-99b3-d1bc4e7e534c\") " pod="openstack/keystone-cron-29320621-l2g2n" Sep 30 13:01:00 crc kubenswrapper[5002]: I0930 13:01:00.386275 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"config-data\" (UniqueName: \"kubernetes.io/secret/321c3fc2-62d9-46a2-99b3-d1bc4e7e534c-config-data\") pod \"keystone-cron-29320621-l2g2n\" (UID: \"321c3fc2-62d9-46a2-99b3-d1bc4e7e534c\") " pod="openstack/keystone-cron-29320621-l2g2n" Sep 30 13:01:00 crc kubenswrapper[5002]: I0930 13:01:00.386668 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/321c3fc2-62d9-46a2-99b3-d1bc4e7e534c-fernet-keys\") pod \"keystone-cron-29320621-l2g2n\" (UID: \"321c3fc2-62d9-46a2-99b3-d1bc4e7e534c\") " pod="openstack/keystone-cron-29320621-l2g2n" Sep 30 13:01:00 crc kubenswrapper[5002]: I0930 13:01:00.386758 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/321c3fc2-62d9-46a2-99b3-d1bc4e7e534c-combined-ca-bundle\") pod \"keystone-cron-29320621-l2g2n\" (UID: \"321c3fc2-62d9-46a2-99b3-d1bc4e7e534c\") " pod="openstack/keystone-cron-29320621-l2g2n" Sep 30 13:01:00 crc kubenswrapper[5002]: I0930 13:01:00.396499 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cf667\" (UniqueName: \"kubernetes.io/projected/321c3fc2-62d9-46a2-99b3-d1bc4e7e534c-kube-api-access-cf667\") pod \"keystone-cron-29320621-l2g2n\" (UID: \"321c3fc2-62d9-46a2-99b3-d1bc4e7e534c\") " pod="openstack/keystone-cron-29320621-l2g2n" Sep 30 13:01:00 crc kubenswrapper[5002]: I0930 13:01:00.478495 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29320621-l2g2n" Sep 30 13:01:00 crc kubenswrapper[5002]: I0930 13:01:00.908917 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cron-29320621-l2g2n"] Sep 30 13:01:00 crc kubenswrapper[5002]: W0930 13:01:00.913778 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod321c3fc2_62d9_46a2_99b3_d1bc4e7e534c.slice/crio-6b1520e015cce5a01f7c19866436a722483a5308ed7364f203adee469bbe89b6 WatchSource:0}: Error finding container 6b1520e015cce5a01f7c19866436a722483a5308ed7364f203adee469bbe89b6: Status 404 returned error can't find the container with id 6b1520e015cce5a01f7c19866436a722483a5308ed7364f203adee469bbe89b6 Sep 30 13:01:01 crc kubenswrapper[5002]: I0930 13:01:01.762052 5002 generic.go:334] "Generic (PLEG): container finished" podID="d81299d2-2b37-4c3f-b313-d02d7b33045c" containerID="8095a0d20ceb1ed56a1814f16c0f64097156ff903d8e75a0b98729a526426319" exitCode=0 Sep 30 13:01:01 crc kubenswrapper[5002]: I0930 13:01:01.762108 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-txpxh" event={"ID":"d81299d2-2b37-4c3f-b313-d02d7b33045c","Type":"ContainerDied","Data":"8095a0d20ceb1ed56a1814f16c0f64097156ff903d8e75a0b98729a526426319"} Sep 30 13:01:01 crc kubenswrapper[5002]: I0930 13:01:01.766740 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29320621-l2g2n" event={"ID":"321c3fc2-62d9-46a2-99b3-d1bc4e7e534c","Type":"ContainerStarted","Data":"7c6bcb61030c6577b49c4dcd6a6a137ba5e28935e0b2a8d77aa46b618c0b3fb0"} Sep 30 13:01:01 crc kubenswrapper[5002]: I0930 13:01:01.766786 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29320621-l2g2n" event={"ID":"321c3fc2-62d9-46a2-99b3-d1bc4e7e534c","Type":"ContainerStarted","Data":"6b1520e015cce5a01f7c19866436a722483a5308ed7364f203adee469bbe89b6"} Sep 30 13:01:01 crc kubenswrapper[5002]: I0930 
13:01:01.812927 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-cron-29320621-l2g2n" podStartSLOduration=1.812904743 podStartE2EDuration="1.812904743s" podCreationTimestamp="2025-09-30 13:01:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 13:01:01.800571266 +0000 UTC m=+2436.050253412" watchObservedRunningTime="2025-09-30 13:01:01.812904743 +0000 UTC m=+2436.062586889" Sep 30 13:01:03 crc kubenswrapper[5002]: I0930 13:01:03.233619 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-txpxh" Sep 30 13:01:03 crc kubenswrapper[5002]: I0930 13:01:03.339519 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d81299d2-2b37-4c3f-b313-d02d7b33045c-nova-combined-ca-bundle\") pod \"d81299d2-2b37-4c3f-b313-d02d7b33045c\" (UID: \"d81299d2-2b37-4c3f-b313-d02d7b33045c\") " Sep 30 13:01:03 crc kubenswrapper[5002]: I0930 13:01:03.339617 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/d81299d2-2b37-4c3f-b313-d02d7b33045c-nova-migration-ssh-key-1\") pod \"d81299d2-2b37-4c3f-b313-d02d7b33045c\" (UID: \"d81299d2-2b37-4c3f-b313-d02d7b33045c\") " Sep 30 13:01:03 crc kubenswrapper[5002]: I0930 13:01:03.339675 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d81299d2-2b37-4c3f-b313-d02d7b33045c-inventory\") pod \"d81299d2-2b37-4c3f-b313-d02d7b33045c\" (UID: \"d81299d2-2b37-4c3f-b313-d02d7b33045c\") " Sep 30 13:01:03 crc kubenswrapper[5002]: I0930 13:01:03.339730 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d81299d2-2b37-4c3f-b313-d02d7b33045c-ssh-key\") pod \"d81299d2-2b37-4c3f-b313-d02d7b33045c\" (UID: \"d81299d2-2b37-4c3f-b313-d02d7b33045c\") " Sep 30 13:01:03 crc kubenswrapper[5002]: I0930 13:01:03.339781 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/d81299d2-2b37-4c3f-b313-d02d7b33045c-nova-cell1-compute-config-0\") pod \"d81299d2-2b37-4c3f-b313-d02d7b33045c\" (UID: \"d81299d2-2b37-4c3f-b313-d02d7b33045c\") " Sep 30 13:01:03 crc kubenswrapper[5002]: I0930 13:01:03.339829 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/d81299d2-2b37-4c3f-b313-d02d7b33045c-nova-extra-config-0\") pod \"d81299d2-2b37-4c3f-b313-d02d7b33045c\" (UID: \"d81299d2-2b37-4c3f-b313-d02d7b33045c\") " Sep 30 13:01:03 crc kubenswrapper[5002]: I0930 13:01:03.339900 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/d81299d2-2b37-4c3f-b313-d02d7b33045c-nova-migration-ssh-key-0\") pod \"d81299d2-2b37-4c3f-b313-d02d7b33045c\" (UID: \"d81299d2-2b37-4c3f-b313-d02d7b33045c\") " Sep 30 13:01:03 crc kubenswrapper[5002]: I0930 13:01:03.339970 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/d81299d2-2b37-4c3f-b313-d02d7b33045c-nova-cell1-compute-config-1\") pod 
\"d81299d2-2b37-4c3f-b313-d02d7b33045c\" (UID: \"d81299d2-2b37-4c3f-b313-d02d7b33045c\") " Sep 30 13:01:03 crc kubenswrapper[5002]: I0930 13:01:03.339992 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wcj26\" (UniqueName: \"kubernetes.io/projected/d81299d2-2b37-4c3f-b313-d02d7b33045c-kube-api-access-wcj26\") pod \"d81299d2-2b37-4c3f-b313-d02d7b33045c\" (UID: \"d81299d2-2b37-4c3f-b313-d02d7b33045c\") " Sep 30 13:01:03 crc kubenswrapper[5002]: I0930 13:01:03.345094 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d81299d2-2b37-4c3f-b313-d02d7b33045c-kube-api-access-wcj26" (OuterVolumeSpecName: "kube-api-access-wcj26") pod "d81299d2-2b37-4c3f-b313-d02d7b33045c" (UID: "d81299d2-2b37-4c3f-b313-d02d7b33045c"). InnerVolumeSpecName "kube-api-access-wcj26". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 13:01:03 crc kubenswrapper[5002]: I0930 13:01:03.346227 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d81299d2-2b37-4c3f-b313-d02d7b33045c-nova-combined-ca-bundle" (OuterVolumeSpecName: "nova-combined-ca-bundle") pod "d81299d2-2b37-4c3f-b313-d02d7b33045c" (UID: "d81299d2-2b37-4c3f-b313-d02d7b33045c"). InnerVolumeSpecName "nova-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:01:03 crc kubenswrapper[5002]: I0930 13:01:03.365432 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d81299d2-2b37-4c3f-b313-d02d7b33045c-nova-migration-ssh-key-1" (OuterVolumeSpecName: "nova-migration-ssh-key-1") pod "d81299d2-2b37-4c3f-b313-d02d7b33045c" (UID: "d81299d2-2b37-4c3f-b313-d02d7b33045c"). InnerVolumeSpecName "nova-migration-ssh-key-1". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:01:03 crc kubenswrapper[5002]: I0930 13:01:03.367522 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d81299d2-2b37-4c3f-b313-d02d7b33045c-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "d81299d2-2b37-4c3f-b313-d02d7b33045c" (UID: "d81299d2-2b37-4c3f-b313-d02d7b33045c"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:01:03 crc kubenswrapper[5002]: I0930 13:01:03.369021 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d81299d2-2b37-4c3f-b313-d02d7b33045c-nova-migration-ssh-key-0" (OuterVolumeSpecName: "nova-migration-ssh-key-0") pod "d81299d2-2b37-4c3f-b313-d02d7b33045c" (UID: "d81299d2-2b37-4c3f-b313-d02d7b33045c"). InnerVolumeSpecName "nova-migration-ssh-key-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:01:03 crc kubenswrapper[5002]: I0930 13:01:03.374255 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d81299d2-2b37-4c3f-b313-d02d7b33045c-nova-extra-config-0" (OuterVolumeSpecName: "nova-extra-config-0") pod "d81299d2-2b37-4c3f-b313-d02d7b33045c" (UID: "d81299d2-2b37-4c3f-b313-d02d7b33045c"). InnerVolumeSpecName "nova-extra-config-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 13:01:03 crc kubenswrapper[5002]: I0930 13:01:03.374836 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d81299d2-2b37-4c3f-b313-d02d7b33045c-inventory" (OuterVolumeSpecName: "inventory") pod "d81299d2-2b37-4c3f-b313-d02d7b33045c" (UID: "d81299d2-2b37-4c3f-b313-d02d7b33045c"). 
InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:01:03 crc kubenswrapper[5002]: I0930 13:01:03.375289 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d81299d2-2b37-4c3f-b313-d02d7b33045c-nova-cell1-compute-config-1" (OuterVolumeSpecName: "nova-cell1-compute-config-1") pod "d81299d2-2b37-4c3f-b313-d02d7b33045c" (UID: "d81299d2-2b37-4c3f-b313-d02d7b33045c"). InnerVolumeSpecName "nova-cell1-compute-config-1". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:01:03 crc kubenswrapper[5002]: I0930 13:01:03.377051 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d81299d2-2b37-4c3f-b313-d02d7b33045c-nova-cell1-compute-config-0" (OuterVolumeSpecName: "nova-cell1-compute-config-0") pod "d81299d2-2b37-4c3f-b313-d02d7b33045c" (UID: "d81299d2-2b37-4c3f-b313-d02d7b33045c"). InnerVolumeSpecName "nova-cell1-compute-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:01:03 crc kubenswrapper[5002]: I0930 13:01:03.442503 5002 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d81299d2-2b37-4c3f-b313-d02d7b33045c-inventory\") on node \"crc\" DevicePath \"\"" Sep 30 13:01:03 crc kubenswrapper[5002]: I0930 13:01:03.442547 5002 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d81299d2-2b37-4c3f-b313-d02d7b33045c-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 30 13:01:03 crc kubenswrapper[5002]: I0930 13:01:03.442562 5002 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/d81299d2-2b37-4c3f-b313-d02d7b33045c-nova-cell1-compute-config-0\") on node \"crc\" DevicePath \"\"" Sep 30 13:01:03 crc kubenswrapper[5002]: I0930 13:01:03.442577 5002 reconciler_common.go:293] "Volume detached for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/d81299d2-2b37-4c3f-b313-d02d7b33045c-nova-extra-config-0\") on node \"crc\" DevicePath \"\"" Sep 30 13:01:03 crc kubenswrapper[5002]: I0930 13:01:03.442590 5002 reconciler_common.go:293] "Volume detached for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/d81299d2-2b37-4c3f-b313-d02d7b33045c-nova-migration-ssh-key-0\") on node \"crc\" DevicePath \"\"" Sep 30 13:01:03 crc kubenswrapper[5002]: I0930 13:01:03.442602 5002 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/d81299d2-2b37-4c3f-b313-d02d7b33045c-nova-cell1-compute-config-1\") on node \"crc\" DevicePath \"\"" Sep 30 13:01:03 crc kubenswrapper[5002]: I0930 13:01:03.442614 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wcj26\" (UniqueName: \"kubernetes.io/projected/d81299d2-2b37-4c3f-b313-d02d7b33045c-kube-api-access-wcj26\") on node \"crc\" DevicePath \"\"" Sep 30 13:01:03 crc kubenswrapper[5002]: I0930 13:01:03.442626 5002 reconciler_common.go:293] "Volume detached for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d81299d2-2b37-4c3f-b313-d02d7b33045c-nova-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 13:01:03 crc kubenswrapper[5002]: I0930 13:01:03.442637 5002 reconciler_common.go:293] "Volume detached for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/d81299d2-2b37-4c3f-b313-d02d7b33045c-nova-migration-ssh-key-1\") on node \"crc\" DevicePath \"\"" Sep 30 13:01:03 
Sep 30 13:01:03 crc kubenswrapper[5002]: I0930 13:01:03.786716 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-txpxh" event={"ID":"d81299d2-2b37-4c3f-b313-d02d7b33045c","Type":"ContainerDied","Data":"31ab3f9961b6647465ae2e34a377bc4e7904f62afc49b13aab61fc35318cd8f7"}
Sep 30 13:01:03 crc kubenswrapper[5002]: I0930 13:01:03.786801 5002 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="31ab3f9961b6647465ae2e34a377bc4e7904f62afc49b13aab61fc35318cd8f7"
Sep 30 13:01:03 crc kubenswrapper[5002]: I0930 13:01:03.786764 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-txpxh"
Sep 30 13:01:03 crc kubenswrapper[5002]: I0930 13:01:03.789867 5002 generic.go:334] "Generic (PLEG): container finished" podID="321c3fc2-62d9-46a2-99b3-d1bc4e7e534c" containerID="7c6bcb61030c6577b49c4dcd6a6a137ba5e28935e0b2a8d77aa46b618c0b3fb0" exitCode=0
Sep 30 13:01:03 crc kubenswrapper[5002]: I0930 13:01:03.789917 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29320621-l2g2n" event={"ID":"321c3fc2-62d9-46a2-99b3-d1bc4e7e534c","Type":"ContainerDied","Data":"7c6bcb61030c6577b49c4dcd6a6a137ba5e28935e0b2a8d77aa46b618c0b3fb0"}
Sep 30 13:01:03 crc kubenswrapper[5002]: I0930 13:01:03.901929 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/telemetry-edpm-deployment-openstack-edpm-ipam-r748r"]
Sep 30 13:01:03 crc kubenswrapper[5002]: E0930 13:01:03.902694 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d81299d2-2b37-4c3f-b313-d02d7b33045c" containerName="nova-edpm-deployment-openstack-edpm-ipam"
Sep 30 13:01:03 crc kubenswrapper[5002]: I0930 13:01:03.902715 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="d81299d2-2b37-4c3f-b313-d02d7b33045c" containerName="nova-edpm-deployment-openstack-edpm-ipam"
Sep 30 13:01:03 crc kubenswrapper[5002]: I0930 13:01:03.902952 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="d81299d2-2b37-4c3f-b313-d02d7b33045c" containerName="nova-edpm-deployment-openstack-edpm-ipam"
Sep 30 13:01:03 crc kubenswrapper[5002]: I0930 13:01:03.903805 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-r748r"
Sep 30 13:01:03 crc kubenswrapper[5002]: I0930 13:01:03.906102 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam"
Sep 30 13:01:03 crc kubenswrapper[5002]: I0930 13:01:03.906242 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-ddhrk"
Sep 30 13:01:03 crc kubenswrapper[5002]: I0930 13:01:03.906360 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret"
Sep 30 13:01:03 crc kubenswrapper[5002]: I0930 13:01:03.906366 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env"
Sep 30 13:01:03 crc kubenswrapper[5002]: I0930 13:01:03.906424 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-compute-config-data"
Sep 30 13:01:03 crc kubenswrapper[5002]: I0930 13:01:03.924850 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/telemetry-edpm-deployment-openstack-edpm-ipam-r748r"]
Sep 30 13:01:03 crc kubenswrapper[5002]: I0930 13:01:03.954242 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5614484d-58b4-42e2-94a5-dda83b89be64-inventory\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-r748r\" (UID: \"5614484d-58b4-42e2-94a5-dda83b89be64\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-r748r"
Sep 30 13:01:03 crc kubenswrapper[5002]: I0930 13:01:03.954310 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5614484d-58b4-42e2-94a5-dda83b89be64-ssh-key\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-r748r\" (UID: \"5614484d-58b4-42e2-94a5-dda83b89be64\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-r748r"
Sep 30 13:01:03 crc kubenswrapper[5002]: I0930 13:01:03.954427 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/5614484d-58b4-42e2-94a5-dda83b89be64-ceilometer-compute-config-data-1\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-r748r\" (UID: \"5614484d-58b4-42e2-94a5-dda83b89be64\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-r748r"
Sep 30 13:01:03 crc kubenswrapper[5002]: I0930 13:01:03.954547 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/5614484d-58b4-42e2-94a5-dda83b89be64-ceilometer-compute-config-data-2\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-r748r\" (UID: \"5614484d-58b4-42e2-94a5-dda83b89be64\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-r748r"
Sep 30 13:01:03 crc kubenswrapper[5002]: I0930 13:01:03.954611 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pw59d\" (UniqueName: \"kubernetes.io/projected/5614484d-58b4-42e2-94a5-dda83b89be64-kube-api-access-pw59d\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-r748r\" (UID: \"5614484d-58b4-42e2-94a5-dda83b89be64\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-r748r"
Sep 30 13:01:03 crc kubenswrapper[5002]: I0930 13:01:03.954649 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5614484d-58b4-42e2-94a5-dda83b89be64-telemetry-combined-ca-bundle\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-r748r\" (UID: \"5614484d-58b4-42e2-94a5-dda83b89be64\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-r748r"
Sep 30 13:01:03 crc kubenswrapper[5002]: I0930 13:01:03.954733 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/5614484d-58b4-42e2-94a5-dda83b89be64-ceilometer-compute-config-data-0\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-r748r\" (UID: \"5614484d-58b4-42e2-94a5-dda83b89be64\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-r748r"
Sep 30 13:01:04 crc kubenswrapper[5002]: I0930 13:01:04.056389 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5614484d-58b4-42e2-94a5-dda83b89be64-inventory\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-r748r\" (UID: \"5614484d-58b4-42e2-94a5-dda83b89be64\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-r748r"
Sep 30 13:01:04 crc kubenswrapper[5002]: I0930 13:01:04.056466 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5614484d-58b4-42e2-94a5-dda83b89be64-ssh-key\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-r748r\" (UID: \"5614484d-58b4-42e2-94a5-dda83b89be64\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-r748r"
Sep 30 13:01:04 crc kubenswrapper[5002]: I0930 13:01:04.056621 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/5614484d-58b4-42e2-94a5-dda83b89be64-ceilometer-compute-config-data-1\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-r748r\" (UID: \"5614484d-58b4-42e2-94a5-dda83b89be64\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-r748r"
Sep 30 13:01:04 crc kubenswrapper[5002]: I0930 13:01:04.056670 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/5614484d-58b4-42e2-94a5-dda83b89be64-ceilometer-compute-config-data-2\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-r748r\" (UID: \"5614484d-58b4-42e2-94a5-dda83b89be64\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-r748r"
Sep 30 13:01:04 crc kubenswrapper[5002]: I0930 13:01:04.056705 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pw59d\" (UniqueName: \"kubernetes.io/projected/5614484d-58b4-42e2-94a5-dda83b89be64-kube-api-access-pw59d\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-r748r\" (UID: \"5614484d-58b4-42e2-94a5-dda83b89be64\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-r748r"
Sep 30 13:01:04 crc kubenswrapper[5002]: I0930 13:01:04.056742 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5614484d-58b4-42e2-94a5-dda83b89be64-telemetry-combined-ca-bundle\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-r748r\" (UID: \"5614484d-58b4-42e2-94a5-dda83b89be64\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-r748r"
Sep 30 13:01:04 crc kubenswrapper[5002]: I0930 13:01:04.056808 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/5614484d-58b4-42e2-94a5-dda83b89be64-ceilometer-compute-config-data-0\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-r748r\" (UID: \"5614484d-58b4-42e2-94a5-dda83b89be64\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-r748r"
Sep 30 13:01:04 crc kubenswrapper[5002]: I0930 13:01:04.060579 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5614484d-58b4-42e2-94a5-dda83b89be64-ssh-key\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-r748r\" (UID: \"5614484d-58b4-42e2-94a5-dda83b89be64\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-r748r"
Sep 30 13:01:04 crc kubenswrapper[5002]: I0930 13:01:04.060606 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5614484d-58b4-42e2-94a5-dda83b89be64-telemetry-combined-ca-bundle\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-r748r\" (UID: \"5614484d-58b4-42e2-94a5-dda83b89be64\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-r748r"
Sep 30 13:01:04 crc kubenswrapper[5002]: I0930 13:01:04.060760 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5614484d-58b4-42e2-94a5-dda83b89be64-inventory\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-r748r\" (UID: \"5614484d-58b4-42e2-94a5-dda83b89be64\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-r748r"
Sep 30 13:01:04 crc kubenswrapper[5002]: I0930 13:01:04.060904 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/5614484d-58b4-42e2-94a5-dda83b89be64-ceilometer-compute-config-data-2\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-r748r\" (UID: \"5614484d-58b4-42e2-94a5-dda83b89be64\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-r748r"
Sep 30 13:01:04 crc kubenswrapper[5002]: I0930 13:01:04.061580 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/5614484d-58b4-42e2-94a5-dda83b89be64-ceilometer-compute-config-data-1\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-r748r\" (UID: \"5614484d-58b4-42e2-94a5-dda83b89be64\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-r748r"
Sep 30 13:01:04 crc kubenswrapper[5002]: I0930 13:01:04.064730 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/5614484d-58b4-42e2-94a5-dda83b89be64-ceilometer-compute-config-data-0\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-r748r\" (UID: \"5614484d-58b4-42e2-94a5-dda83b89be64\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-r748r"
Sep 30 13:01:04 crc kubenswrapper[5002]: I0930 13:01:04.078096 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pw59d\" (UniqueName: \"kubernetes.io/projected/5614484d-58b4-42e2-94a5-dda83b89be64-kube-api-access-pw59d\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-r748r\" (UID: \"5614484d-58b4-42e2-94a5-dda83b89be64\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-r748r"
\"5614484d-58b4-42e2-94a5-dda83b89be64\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-r748r" Sep 30 13:01:04 crc kubenswrapper[5002]: I0930 13:01:04.229986 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-r748r" Sep 30 13:01:04 crc kubenswrapper[5002]: I0930 13:01:04.747939 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/telemetry-edpm-deployment-openstack-edpm-ipam-r748r"] Sep 30 13:01:04 crc kubenswrapper[5002]: I0930 13:01:04.800679 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-r748r" event={"ID":"5614484d-58b4-42e2-94a5-dda83b89be64","Type":"ContainerStarted","Data":"5ecedc38008edec62f82968e70750a794756d72a9acf33e91ba82f29df9994e2"} Sep 30 13:01:05 crc kubenswrapper[5002]: I0930 13:01:05.082328 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29320621-l2g2n" Sep 30 13:01:05 crc kubenswrapper[5002]: I0930 13:01:05.178123 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cf667\" (UniqueName: \"kubernetes.io/projected/321c3fc2-62d9-46a2-99b3-d1bc4e7e534c-kube-api-access-cf667\") pod \"321c3fc2-62d9-46a2-99b3-d1bc4e7e534c\" (UID: \"321c3fc2-62d9-46a2-99b3-d1bc4e7e534c\") " Sep 30 13:01:05 crc kubenswrapper[5002]: I0930 13:01:05.178246 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/321c3fc2-62d9-46a2-99b3-d1bc4e7e534c-config-data\") pod \"321c3fc2-62d9-46a2-99b3-d1bc4e7e534c\" (UID: \"321c3fc2-62d9-46a2-99b3-d1bc4e7e534c\") " Sep 30 13:01:05 crc kubenswrapper[5002]: I0930 13:01:05.178410 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/321c3fc2-62d9-46a2-99b3-d1bc4e7e534c-fernet-keys\") pod \"321c3fc2-62d9-46a2-99b3-d1bc4e7e534c\" (UID: \"321c3fc2-62d9-46a2-99b3-d1bc4e7e534c\") " Sep 30 13:01:05 crc kubenswrapper[5002]: I0930 13:01:05.178439 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/321c3fc2-62d9-46a2-99b3-d1bc4e7e534c-combined-ca-bundle\") pod \"321c3fc2-62d9-46a2-99b3-d1bc4e7e534c\" (UID: \"321c3fc2-62d9-46a2-99b3-d1bc4e7e534c\") " Sep 30 13:01:05 crc kubenswrapper[5002]: I0930 13:01:05.185421 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/321c3fc2-62d9-46a2-99b3-d1bc4e7e534c-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "321c3fc2-62d9-46a2-99b3-d1bc4e7e534c" (UID: "321c3fc2-62d9-46a2-99b3-d1bc4e7e534c"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:01:05 crc kubenswrapper[5002]: I0930 13:01:05.185788 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/321c3fc2-62d9-46a2-99b3-d1bc4e7e534c-kube-api-access-cf667" (OuterVolumeSpecName: "kube-api-access-cf667") pod "321c3fc2-62d9-46a2-99b3-d1bc4e7e534c" (UID: "321c3fc2-62d9-46a2-99b3-d1bc4e7e534c"). InnerVolumeSpecName "kube-api-access-cf667". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 13:01:05 crc kubenswrapper[5002]: I0930 13:01:05.210333 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/321c3fc2-62d9-46a2-99b3-d1bc4e7e534c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "321c3fc2-62d9-46a2-99b3-d1bc4e7e534c" (UID: "321c3fc2-62d9-46a2-99b3-d1bc4e7e534c"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:01:05 crc kubenswrapper[5002]: I0930 13:01:05.244299 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/321c3fc2-62d9-46a2-99b3-d1bc4e7e534c-config-data" (OuterVolumeSpecName: "config-data") pod "321c3fc2-62d9-46a2-99b3-d1bc4e7e534c" (UID: "321c3fc2-62d9-46a2-99b3-d1bc4e7e534c"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:01:05 crc kubenswrapper[5002]: I0930 13:01:05.282447 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cf667\" (UniqueName: \"kubernetes.io/projected/321c3fc2-62d9-46a2-99b3-d1bc4e7e534c-kube-api-access-cf667\") on node \"crc\" DevicePath \"\"" Sep 30 13:01:05 crc kubenswrapper[5002]: I0930 13:01:05.282509 5002 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/321c3fc2-62d9-46a2-99b3-d1bc4e7e534c-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 13:01:05 crc kubenswrapper[5002]: I0930 13:01:05.282522 5002 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/321c3fc2-62d9-46a2-99b3-d1bc4e7e534c-fernet-keys\") on node \"crc\" DevicePath \"\"" Sep 30 13:01:05 crc kubenswrapper[5002]: I0930 13:01:05.282532 5002 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/321c3fc2-62d9-46a2-99b3-d1bc4e7e534c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 13:01:05 crc kubenswrapper[5002]: I0930 13:01:05.811410 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-cron-29320621-l2g2n" Sep 30 13:01:05 crc kubenswrapper[5002]: I0930 13:01:05.811410 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29320621-l2g2n" event={"ID":"321c3fc2-62d9-46a2-99b3-d1bc4e7e534c","Type":"ContainerDied","Data":"6b1520e015cce5a01f7c19866436a722483a5308ed7364f203adee469bbe89b6"} Sep 30 13:01:05 crc kubenswrapper[5002]: I0930 13:01:05.812451 5002 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6b1520e015cce5a01f7c19866436a722483a5308ed7364f203adee469bbe89b6" Sep 30 13:01:05 crc kubenswrapper[5002]: I0930 13:01:05.813239 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-r748r" event={"ID":"5614484d-58b4-42e2-94a5-dda83b89be64","Type":"ContainerStarted","Data":"f0090e2a29a2b9f6c7c643c0f5e67bfdab32f2b64c97ce0a83cf6310449582db"} Sep 30 13:01:05 crc kubenswrapper[5002]: I0930 13:01:05.829647 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-r748r" podStartSLOduration=2.386233034 podStartE2EDuration="2.829622128s" podCreationTimestamp="2025-09-30 13:01:03 +0000 UTC" firstStartedPulling="2025-09-30 13:01:04.75782544 +0000 UTC m=+2439.007507586" lastFinishedPulling="2025-09-30 13:01:05.201214524 +0000 UTC m=+2439.450896680" observedRunningTime="2025-09-30 13:01:05.827353287 +0000 UTC m=+2440.077035453" watchObservedRunningTime="2025-09-30 13:01:05.829622128 +0000 UTC m=+2440.079304284" Sep 30 13:01:09 crc kubenswrapper[5002]: I0930 13:01:09.675883 5002 scope.go:117] "RemoveContainer" containerID="c399d44e9bfc45838bcb7d385b7879008ca1a51ef33744b6ce2235833eb197e0" Sep 30 13:01:09 crc kubenswrapper[5002]: E0930 13:01:09.676712 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-ncbb5_openshift-machine-config-operator(341a55c6-78d3-4fa2-8f47-b56fd41fa1c1)\"" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" podUID="341a55c6-78d3-4fa2-8f47-b56fd41fa1c1" Sep 30 13:01:24 crc kubenswrapper[5002]: I0930 13:01:24.676319 5002 scope.go:117] "RemoveContainer" containerID="c399d44e9bfc45838bcb7d385b7879008ca1a51ef33744b6ce2235833eb197e0" Sep 30 13:01:24 crc kubenswrapper[5002]: E0930 13:01:24.677078 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-ncbb5_openshift-machine-config-operator(341a55c6-78d3-4fa2-8f47-b56fd41fa1c1)\"" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" podUID="341a55c6-78d3-4fa2-8f47-b56fd41fa1c1" Sep 30 13:01:39 crc kubenswrapper[5002]: I0930 13:01:39.676836 5002 scope.go:117] "RemoveContainer" containerID="c399d44e9bfc45838bcb7d385b7879008ca1a51ef33744b6ce2235833eb197e0" Sep 30 13:01:40 crc kubenswrapper[5002]: I0930 13:01:40.143863 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" event={"ID":"341a55c6-78d3-4fa2-8f47-b56fd41fa1c1","Type":"ContainerStarted","Data":"2171ea1d3e69548aa803f0251f328d7b726c5d0d03df99465807718d28b0c056"} Sep 30 13:01:43 crc kubenswrapper[5002]: I0930 13:01:43.124613 5002 scope.go:117] "RemoveContainer" 
containerID="5ce6f63db40384e402319345e561b085d85c8a6cf15026d119ca2ddffe44797f" Sep 30 13:01:43 crc kubenswrapper[5002]: I0930 13:01:43.159996 5002 scope.go:117] "RemoveContainer" containerID="abb6900d289202ade25969bdb671915bec42deb7d0b6f18b7e11dbebaac50c99" Sep 30 13:01:43 crc kubenswrapper[5002]: I0930 13:01:43.249784 5002 scope.go:117] "RemoveContainer" containerID="3653664761d4eed18f30e39cec7bcea5408cef3331c53302d8cd1ab1466fdfd8" Sep 30 13:03:25 crc kubenswrapper[5002]: I0930 13:03:25.118221 5002 generic.go:334] "Generic (PLEG): container finished" podID="5614484d-58b4-42e2-94a5-dda83b89be64" containerID="f0090e2a29a2b9f6c7c643c0f5e67bfdab32f2b64c97ce0a83cf6310449582db" exitCode=0 Sep 30 13:03:25 crc kubenswrapper[5002]: I0930 13:03:25.118297 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-r748r" event={"ID":"5614484d-58b4-42e2-94a5-dda83b89be64","Type":"ContainerDied","Data":"f0090e2a29a2b9f6c7c643c0f5e67bfdab32f2b64c97ce0a83cf6310449582db"} Sep 30 13:03:26 crc kubenswrapper[5002]: I0930 13:03:26.547638 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-r748r" Sep 30 13:03:26 crc kubenswrapper[5002]: I0930 13:03:26.725520 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pw59d\" (UniqueName: \"kubernetes.io/projected/5614484d-58b4-42e2-94a5-dda83b89be64-kube-api-access-pw59d\") pod \"5614484d-58b4-42e2-94a5-dda83b89be64\" (UID: \"5614484d-58b4-42e2-94a5-dda83b89be64\") " Sep 30 13:03:26 crc kubenswrapper[5002]: I0930 13:03:26.725609 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5614484d-58b4-42e2-94a5-dda83b89be64-ssh-key\") pod \"5614484d-58b4-42e2-94a5-dda83b89be64\" (UID: \"5614484d-58b4-42e2-94a5-dda83b89be64\") " Sep 30 13:03:26 crc kubenswrapper[5002]: I0930 13:03:26.725641 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5614484d-58b4-42e2-94a5-dda83b89be64-telemetry-combined-ca-bundle\") pod \"5614484d-58b4-42e2-94a5-dda83b89be64\" (UID: \"5614484d-58b4-42e2-94a5-dda83b89be64\") " Sep 30 13:03:26 crc kubenswrapper[5002]: I0930 13:03:26.725758 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/5614484d-58b4-42e2-94a5-dda83b89be64-ceilometer-compute-config-data-1\") pod \"5614484d-58b4-42e2-94a5-dda83b89be64\" (UID: \"5614484d-58b4-42e2-94a5-dda83b89be64\") " Sep 30 13:03:26 crc kubenswrapper[5002]: I0930 13:03:26.725791 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/5614484d-58b4-42e2-94a5-dda83b89be64-ceilometer-compute-config-data-0\") pod \"5614484d-58b4-42e2-94a5-dda83b89be64\" (UID: \"5614484d-58b4-42e2-94a5-dda83b89be64\") " Sep 30 13:03:26 crc kubenswrapper[5002]: I0930 13:03:26.725833 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5614484d-58b4-42e2-94a5-dda83b89be64-inventory\") pod \"5614484d-58b4-42e2-94a5-dda83b89be64\" (UID: \"5614484d-58b4-42e2-94a5-dda83b89be64\") " Sep 30 13:03:26 crc kubenswrapper[5002]: I0930 13:03:26.725922 5002 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/5614484d-58b4-42e2-94a5-dda83b89be64-ceilometer-compute-config-data-2\") pod \"5614484d-58b4-42e2-94a5-dda83b89be64\" (UID: \"5614484d-58b4-42e2-94a5-dda83b89be64\") " Sep 30 13:03:26 crc kubenswrapper[5002]: I0930 13:03:26.733345 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5614484d-58b4-42e2-94a5-dda83b89be64-telemetry-combined-ca-bundle" (OuterVolumeSpecName: "telemetry-combined-ca-bundle") pod "5614484d-58b4-42e2-94a5-dda83b89be64" (UID: "5614484d-58b4-42e2-94a5-dda83b89be64"). InnerVolumeSpecName "telemetry-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:03:26 crc kubenswrapper[5002]: I0930 13:03:26.737795 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5614484d-58b4-42e2-94a5-dda83b89be64-kube-api-access-pw59d" (OuterVolumeSpecName: "kube-api-access-pw59d") pod "5614484d-58b4-42e2-94a5-dda83b89be64" (UID: "5614484d-58b4-42e2-94a5-dda83b89be64"). InnerVolumeSpecName "kube-api-access-pw59d". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 13:03:26 crc kubenswrapper[5002]: I0930 13:03:26.757858 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5614484d-58b4-42e2-94a5-dda83b89be64-ceilometer-compute-config-data-1" (OuterVolumeSpecName: "ceilometer-compute-config-data-1") pod "5614484d-58b4-42e2-94a5-dda83b89be64" (UID: "5614484d-58b4-42e2-94a5-dda83b89be64"). InnerVolumeSpecName "ceilometer-compute-config-data-1". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:03:26 crc kubenswrapper[5002]: I0930 13:03:26.761811 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5614484d-58b4-42e2-94a5-dda83b89be64-ceilometer-compute-config-data-0" (OuterVolumeSpecName: "ceilometer-compute-config-data-0") pod "5614484d-58b4-42e2-94a5-dda83b89be64" (UID: "5614484d-58b4-42e2-94a5-dda83b89be64"). InnerVolumeSpecName "ceilometer-compute-config-data-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:03:26 crc kubenswrapper[5002]: I0930 13:03:26.761963 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5614484d-58b4-42e2-94a5-dda83b89be64-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "5614484d-58b4-42e2-94a5-dda83b89be64" (UID: "5614484d-58b4-42e2-94a5-dda83b89be64"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:03:26 crc kubenswrapper[5002]: I0930 13:03:26.766025 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5614484d-58b4-42e2-94a5-dda83b89be64-ceilometer-compute-config-data-2" (OuterVolumeSpecName: "ceilometer-compute-config-data-2") pod "5614484d-58b4-42e2-94a5-dda83b89be64" (UID: "5614484d-58b4-42e2-94a5-dda83b89be64"). InnerVolumeSpecName "ceilometer-compute-config-data-2". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:03:26 crc kubenswrapper[5002]: I0930 13:03:26.768200 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5614484d-58b4-42e2-94a5-dda83b89be64-inventory" (OuterVolumeSpecName: "inventory") pod "5614484d-58b4-42e2-94a5-dda83b89be64" (UID: "5614484d-58b4-42e2-94a5-dda83b89be64"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:03:26 crc kubenswrapper[5002]: I0930 13:03:26.829632 5002 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/5614484d-58b4-42e2-94a5-dda83b89be64-ceilometer-compute-config-data-2\") on node \"crc\" DevicePath \"\"" Sep 30 13:03:26 crc kubenswrapper[5002]: I0930 13:03:26.829720 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pw59d\" (UniqueName: \"kubernetes.io/projected/5614484d-58b4-42e2-94a5-dda83b89be64-kube-api-access-pw59d\") on node \"crc\" DevicePath \"\"" Sep 30 13:03:26 crc kubenswrapper[5002]: I0930 13:03:26.829752 5002 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5614484d-58b4-42e2-94a5-dda83b89be64-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 30 13:03:26 crc kubenswrapper[5002]: I0930 13:03:26.829768 5002 reconciler_common.go:293] "Volume detached for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5614484d-58b4-42e2-94a5-dda83b89be64-telemetry-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 13:03:26 crc kubenswrapper[5002]: I0930 13:03:26.829783 5002 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/5614484d-58b4-42e2-94a5-dda83b89be64-ceilometer-compute-config-data-1\") on node \"crc\" DevicePath \"\"" Sep 30 13:03:26 crc kubenswrapper[5002]: I0930 13:03:26.829799 5002 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/5614484d-58b4-42e2-94a5-dda83b89be64-ceilometer-compute-config-data-0\") on node \"crc\" DevicePath \"\"" Sep 30 13:03:26 crc kubenswrapper[5002]: I0930 13:03:26.829815 5002 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5614484d-58b4-42e2-94a5-dda83b89be64-inventory\") on node \"crc\" DevicePath \"\"" Sep 30 13:03:27 crc kubenswrapper[5002]: I0930 13:03:27.138877 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-r748r" event={"ID":"5614484d-58b4-42e2-94a5-dda83b89be64","Type":"ContainerDied","Data":"5ecedc38008edec62f82968e70750a794756d72a9acf33e91ba82f29df9994e2"} Sep 30 13:03:27 crc kubenswrapper[5002]: I0930 13:03:27.138930 5002 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5ecedc38008edec62f82968e70750a794756d72a9acf33e91ba82f29df9994e2" Sep 30 13:03:27 crc kubenswrapper[5002]: I0930 13:03:27.138929 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-r748r" Sep 30 13:04:02 crc kubenswrapper[5002]: I0930 13:04:02.098224 5002 patch_prober.go:28] interesting pod/machine-config-daemon-ncbb5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 13:04:02 crc kubenswrapper[5002]: I0930 13:04:02.099224 5002 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" podUID="341a55c6-78d3-4fa2-8f47-b56fd41fa1c1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 13:04:03 crc kubenswrapper[5002]: I0930 13:04:03.740443 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-7qm5t"] Sep 30 13:04:03 crc kubenswrapper[5002]: E0930 13:04:03.741252 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5614484d-58b4-42e2-94a5-dda83b89be64" containerName="telemetry-edpm-deployment-openstack-edpm-ipam" Sep 30 13:04:03 crc kubenswrapper[5002]: I0930 13:04:03.741274 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="5614484d-58b4-42e2-94a5-dda83b89be64" containerName="telemetry-edpm-deployment-openstack-edpm-ipam" Sep 30 13:04:03 crc kubenswrapper[5002]: E0930 13:04:03.741301 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="321c3fc2-62d9-46a2-99b3-d1bc4e7e534c" containerName="keystone-cron" Sep 30 13:04:03 crc kubenswrapper[5002]: I0930 13:04:03.741310 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="321c3fc2-62d9-46a2-99b3-d1bc4e7e534c" containerName="keystone-cron" Sep 30 13:04:03 crc kubenswrapper[5002]: I0930 13:04:03.741577 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="321c3fc2-62d9-46a2-99b3-d1bc4e7e534c" containerName="keystone-cron" Sep 30 13:04:03 crc kubenswrapper[5002]: I0930 13:04:03.741600 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="5614484d-58b4-42e2-94a5-dda83b89be64" containerName="telemetry-edpm-deployment-openstack-edpm-ipam" Sep 30 13:04:03 crc kubenswrapper[5002]: I0930 13:04:03.743693 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-7qm5t" Sep 30 13:04:03 crc kubenswrapper[5002]: I0930 13:04:03.751337 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-7qm5t"] Sep 30 13:04:03 crc kubenswrapper[5002]: I0930 13:04:03.853775 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fpw5j\" (UniqueName: \"kubernetes.io/projected/378fdb00-09a8-45f6-8c30-a25fa2f3b527-kube-api-access-fpw5j\") pod \"redhat-operators-7qm5t\" (UID: \"378fdb00-09a8-45f6-8c30-a25fa2f3b527\") " pod="openshift-marketplace/redhat-operators-7qm5t" Sep 30 13:04:03 crc kubenswrapper[5002]: I0930 13:04:03.853852 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/378fdb00-09a8-45f6-8c30-a25fa2f3b527-catalog-content\") pod \"redhat-operators-7qm5t\" (UID: \"378fdb00-09a8-45f6-8c30-a25fa2f3b527\") " pod="openshift-marketplace/redhat-operators-7qm5t" Sep 30 13:04:03 crc kubenswrapper[5002]: I0930 13:04:03.854279 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/378fdb00-09a8-45f6-8c30-a25fa2f3b527-utilities\") pod \"redhat-operators-7qm5t\" (UID: \"378fdb00-09a8-45f6-8c30-a25fa2f3b527\") " pod="openshift-marketplace/redhat-operators-7qm5t" Sep 30 13:04:03 crc kubenswrapper[5002]: I0930 13:04:03.955871 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fpw5j\" (UniqueName: \"kubernetes.io/projected/378fdb00-09a8-45f6-8c30-a25fa2f3b527-kube-api-access-fpw5j\") pod \"redhat-operators-7qm5t\" (UID: \"378fdb00-09a8-45f6-8c30-a25fa2f3b527\") " pod="openshift-marketplace/redhat-operators-7qm5t" Sep 30 13:04:03 crc kubenswrapper[5002]: I0930 13:04:03.955931 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/378fdb00-09a8-45f6-8c30-a25fa2f3b527-catalog-content\") pod \"redhat-operators-7qm5t\" (UID: \"378fdb00-09a8-45f6-8c30-a25fa2f3b527\") " pod="openshift-marketplace/redhat-operators-7qm5t" Sep 30 13:04:03 crc kubenswrapper[5002]: I0930 13:04:03.956036 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/378fdb00-09a8-45f6-8c30-a25fa2f3b527-utilities\") pod \"redhat-operators-7qm5t\" (UID: \"378fdb00-09a8-45f6-8c30-a25fa2f3b527\") " pod="openshift-marketplace/redhat-operators-7qm5t" Sep 30 13:04:03 crc kubenswrapper[5002]: I0930 13:04:03.956550 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/378fdb00-09a8-45f6-8c30-a25fa2f3b527-utilities\") pod \"redhat-operators-7qm5t\" (UID: \"378fdb00-09a8-45f6-8c30-a25fa2f3b527\") " pod="openshift-marketplace/redhat-operators-7qm5t" Sep 30 13:04:03 crc kubenswrapper[5002]: I0930 13:04:03.956572 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/378fdb00-09a8-45f6-8c30-a25fa2f3b527-catalog-content\") pod \"redhat-operators-7qm5t\" (UID: \"378fdb00-09a8-45f6-8c30-a25fa2f3b527\") " pod="openshift-marketplace/redhat-operators-7qm5t" Sep 30 13:04:03 crc kubenswrapper[5002]: I0930 13:04:03.975502 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-fpw5j\" (UniqueName: \"kubernetes.io/projected/378fdb00-09a8-45f6-8c30-a25fa2f3b527-kube-api-access-fpw5j\") pod \"redhat-operators-7qm5t\" (UID: \"378fdb00-09a8-45f6-8c30-a25fa2f3b527\") " pod="openshift-marketplace/redhat-operators-7qm5t" Sep 30 13:04:04 crc kubenswrapper[5002]: I0930 13:04:04.076842 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-7qm5t" Sep 30 13:04:04 crc kubenswrapper[5002]: I0930 13:04:04.533911 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-7qm5t"] Sep 30 13:04:05 crc kubenswrapper[5002]: I0930 13:04:05.494688 5002 generic.go:334] "Generic (PLEG): container finished" podID="378fdb00-09a8-45f6-8c30-a25fa2f3b527" containerID="b074a9cacd516fcdace4d9536649b0a675c2a8cc82436bee1033fe1fe62bfaaf" exitCode=0 Sep 30 13:04:05 crc kubenswrapper[5002]: I0930 13:04:05.494752 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7qm5t" event={"ID":"378fdb00-09a8-45f6-8c30-a25fa2f3b527","Type":"ContainerDied","Data":"b074a9cacd516fcdace4d9536649b0a675c2a8cc82436bee1033fe1fe62bfaaf"} Sep 30 13:04:05 crc kubenswrapper[5002]: I0930 13:04:05.495305 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7qm5t" event={"ID":"378fdb00-09a8-45f6-8c30-a25fa2f3b527","Type":"ContainerStarted","Data":"633a29a093541952176fa6406013d0f487936d360cf68923e8e3b8687b6f1742"} Sep 30 13:04:05 crc kubenswrapper[5002]: I0930 13:04:05.496884 5002 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Sep 30 13:04:07 crc kubenswrapper[5002]: I0930 13:04:07.517974 5002 generic.go:334] "Generic (PLEG): container finished" podID="378fdb00-09a8-45f6-8c30-a25fa2f3b527" containerID="749bab4e916269b7325b59f190db95e980394ad2ecbebe25bad8a8897e9c18a7" exitCode=0 Sep 30 13:04:07 crc kubenswrapper[5002]: I0930 13:04:07.518156 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7qm5t" event={"ID":"378fdb00-09a8-45f6-8c30-a25fa2f3b527","Type":"ContainerDied","Data":"749bab4e916269b7325b59f190db95e980394ad2ecbebe25bad8a8897e9c18a7"} Sep 30 13:04:08 crc kubenswrapper[5002]: I0930 13:04:08.529276 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7qm5t" event={"ID":"378fdb00-09a8-45f6-8c30-a25fa2f3b527","Type":"ContainerStarted","Data":"2f8fc365623d9c3e5045f879d4c6cc704b2db3c3e3915760fad0a2d1b5a22897"} Sep 30 13:04:08 crc kubenswrapper[5002]: I0930 13:04:08.555755 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-7qm5t" podStartSLOduration=2.790563775 podStartE2EDuration="5.555725014s" podCreationTimestamp="2025-09-30 13:04:03 +0000 UTC" firstStartedPulling="2025-09-30 13:04:05.496590363 +0000 UTC m=+2619.746272509" lastFinishedPulling="2025-09-30 13:04:08.261751602 +0000 UTC m=+2622.511433748" observedRunningTime="2025-09-30 13:04:08.547594087 +0000 UTC m=+2622.797276263" watchObservedRunningTime="2025-09-30 13:04:08.555725014 +0000 UTC m=+2622.805407210" Sep 30 13:04:14 crc kubenswrapper[5002]: I0930 13:04:14.077644 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-7qm5t" Sep 30 13:04:14 crc kubenswrapper[5002]: I0930 13:04:14.078165 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" 
pod="openshift-marketplace/redhat-operators-7qm5t" Sep 30 13:04:14 crc kubenswrapper[5002]: I0930 13:04:14.145657 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-7qm5t" Sep 30 13:04:14 crc kubenswrapper[5002]: I0930 13:04:14.645700 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-7qm5t" Sep 30 13:04:14 crc kubenswrapper[5002]: I0930 13:04:14.695302 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-7qm5t"] Sep 30 13:04:16 crc kubenswrapper[5002]: I0930 13:04:16.604900 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-7qm5t" podUID="378fdb00-09a8-45f6-8c30-a25fa2f3b527" containerName="registry-server" containerID="cri-o://2f8fc365623d9c3e5045f879d4c6cc704b2db3c3e3915760fad0a2d1b5a22897" gracePeriod=2 Sep 30 13:04:17 crc kubenswrapper[5002]: I0930 13:04:17.056034 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-7qm5t" Sep 30 13:04:17 crc kubenswrapper[5002]: I0930 13:04:17.132064 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fpw5j\" (UniqueName: \"kubernetes.io/projected/378fdb00-09a8-45f6-8c30-a25fa2f3b527-kube-api-access-fpw5j\") pod \"378fdb00-09a8-45f6-8c30-a25fa2f3b527\" (UID: \"378fdb00-09a8-45f6-8c30-a25fa2f3b527\") " Sep 30 13:04:17 crc kubenswrapper[5002]: I0930 13:04:17.132177 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/378fdb00-09a8-45f6-8c30-a25fa2f3b527-catalog-content\") pod \"378fdb00-09a8-45f6-8c30-a25fa2f3b527\" (UID: \"378fdb00-09a8-45f6-8c30-a25fa2f3b527\") " Sep 30 13:04:17 crc kubenswrapper[5002]: I0930 13:04:17.132227 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/378fdb00-09a8-45f6-8c30-a25fa2f3b527-utilities\") pod \"378fdb00-09a8-45f6-8c30-a25fa2f3b527\" (UID: \"378fdb00-09a8-45f6-8c30-a25fa2f3b527\") " Sep 30 13:04:17 crc kubenswrapper[5002]: I0930 13:04:17.133055 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/378fdb00-09a8-45f6-8c30-a25fa2f3b527-utilities" (OuterVolumeSpecName: "utilities") pod "378fdb00-09a8-45f6-8c30-a25fa2f3b527" (UID: "378fdb00-09a8-45f6-8c30-a25fa2f3b527"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 13:04:17 crc kubenswrapper[5002]: I0930 13:04:17.138159 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/378fdb00-09a8-45f6-8c30-a25fa2f3b527-kube-api-access-fpw5j" (OuterVolumeSpecName: "kube-api-access-fpw5j") pod "378fdb00-09a8-45f6-8c30-a25fa2f3b527" (UID: "378fdb00-09a8-45f6-8c30-a25fa2f3b527"). InnerVolumeSpecName "kube-api-access-fpw5j". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 13:04:17 crc kubenswrapper[5002]: I0930 13:04:17.220850 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/378fdb00-09a8-45f6-8c30-a25fa2f3b527-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "378fdb00-09a8-45f6-8c30-a25fa2f3b527" (UID: "378fdb00-09a8-45f6-8c30-a25fa2f3b527"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 13:04:17 crc kubenswrapper[5002]: I0930 13:04:17.234530 5002 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/378fdb00-09a8-45f6-8c30-a25fa2f3b527-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 13:04:17 crc kubenswrapper[5002]: I0930 13:04:17.234567 5002 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/378fdb00-09a8-45f6-8c30-a25fa2f3b527-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 13:04:17 crc kubenswrapper[5002]: I0930 13:04:17.234581 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fpw5j\" (UniqueName: \"kubernetes.io/projected/378fdb00-09a8-45f6-8c30-a25fa2f3b527-kube-api-access-fpw5j\") on node \"crc\" DevicePath \"\"" Sep 30 13:04:17 crc kubenswrapper[5002]: I0930 13:04:17.616186 5002 generic.go:334] "Generic (PLEG): container finished" podID="378fdb00-09a8-45f6-8c30-a25fa2f3b527" containerID="2f8fc365623d9c3e5045f879d4c6cc704b2db3c3e3915760fad0a2d1b5a22897" exitCode=0 Sep 30 13:04:17 crc kubenswrapper[5002]: I0930 13:04:17.616247 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-7qm5t" Sep 30 13:04:17 crc kubenswrapper[5002]: I0930 13:04:17.617240 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7qm5t" event={"ID":"378fdb00-09a8-45f6-8c30-a25fa2f3b527","Type":"ContainerDied","Data":"2f8fc365623d9c3e5045f879d4c6cc704b2db3c3e3915760fad0a2d1b5a22897"} Sep 30 13:04:17 crc kubenswrapper[5002]: I0930 13:04:17.617575 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7qm5t" event={"ID":"378fdb00-09a8-45f6-8c30-a25fa2f3b527","Type":"ContainerDied","Data":"633a29a093541952176fa6406013d0f487936d360cf68923e8e3b8687b6f1742"} Sep 30 13:04:17 crc kubenswrapper[5002]: I0930 13:04:17.617649 5002 scope.go:117] "RemoveContainer" containerID="2f8fc365623d9c3e5045f879d4c6cc704b2db3c3e3915760fad0a2d1b5a22897" Sep 30 13:04:17 crc kubenswrapper[5002]: I0930 13:04:17.647131 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-7qm5t"] Sep 30 13:04:17 crc kubenswrapper[5002]: I0930 13:04:17.655917 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-7qm5t"] Sep 30 13:04:17 crc kubenswrapper[5002]: I0930 13:04:17.659537 5002 scope.go:117] "RemoveContainer" containerID="749bab4e916269b7325b59f190db95e980394ad2ecbebe25bad8a8897e9c18a7" Sep 30 13:04:17 crc kubenswrapper[5002]: I0930 13:04:17.681664 5002 scope.go:117] "RemoveContainer" containerID="b074a9cacd516fcdace4d9536649b0a675c2a8cc82436bee1033fe1fe62bfaaf" Sep 30 13:04:17 crc kubenswrapper[5002]: I0930 13:04:17.728030 5002 scope.go:117] "RemoveContainer" containerID="2f8fc365623d9c3e5045f879d4c6cc704b2db3c3e3915760fad0a2d1b5a22897" Sep 30 13:04:17 crc kubenswrapper[5002]: E0930 13:04:17.728538 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2f8fc365623d9c3e5045f879d4c6cc704b2db3c3e3915760fad0a2d1b5a22897\": container with ID starting with 2f8fc365623d9c3e5045f879d4c6cc704b2db3c3e3915760fad0a2d1b5a22897 not found: ID does not exist" containerID="2f8fc365623d9c3e5045f879d4c6cc704b2db3c3e3915760fad0a2d1b5a22897" Sep 30 13:04:17 crc kubenswrapper[5002]: I0930 13:04:17.728611 5002 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2f8fc365623d9c3e5045f879d4c6cc704b2db3c3e3915760fad0a2d1b5a22897"} err="failed to get container status \"2f8fc365623d9c3e5045f879d4c6cc704b2db3c3e3915760fad0a2d1b5a22897\": rpc error: code = NotFound desc = could not find container \"2f8fc365623d9c3e5045f879d4c6cc704b2db3c3e3915760fad0a2d1b5a22897\": container with ID starting with 2f8fc365623d9c3e5045f879d4c6cc704b2db3c3e3915760fad0a2d1b5a22897 not found: ID does not exist" Sep 30 13:04:17 crc kubenswrapper[5002]: I0930 13:04:17.728632 5002 scope.go:117] "RemoveContainer" containerID="749bab4e916269b7325b59f190db95e980394ad2ecbebe25bad8a8897e9c18a7" Sep 30 13:04:17 crc kubenswrapper[5002]: E0930 13:04:17.729003 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"749bab4e916269b7325b59f190db95e980394ad2ecbebe25bad8a8897e9c18a7\": container with ID starting with 749bab4e916269b7325b59f190db95e980394ad2ecbebe25bad8a8897e9c18a7 not found: ID does not exist" containerID="749bab4e916269b7325b59f190db95e980394ad2ecbebe25bad8a8897e9c18a7" Sep 30 13:04:17 crc kubenswrapper[5002]: I0930 13:04:17.729047 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"749bab4e916269b7325b59f190db95e980394ad2ecbebe25bad8a8897e9c18a7"} err="failed to get container status \"749bab4e916269b7325b59f190db95e980394ad2ecbebe25bad8a8897e9c18a7\": rpc error: code = NotFound desc = could not find container \"749bab4e916269b7325b59f190db95e980394ad2ecbebe25bad8a8897e9c18a7\": container with ID starting with 749bab4e916269b7325b59f190db95e980394ad2ecbebe25bad8a8897e9c18a7 not found: ID does not exist" Sep 30 13:04:17 crc kubenswrapper[5002]: I0930 13:04:17.729067 5002 scope.go:117] "RemoveContainer" containerID="b074a9cacd516fcdace4d9536649b0a675c2a8cc82436bee1033fe1fe62bfaaf" Sep 30 13:04:17 crc kubenswrapper[5002]: E0930 13:04:17.729411 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b074a9cacd516fcdace4d9536649b0a675c2a8cc82436bee1033fe1fe62bfaaf\": container with ID starting with b074a9cacd516fcdace4d9536649b0a675c2a8cc82436bee1033fe1fe62bfaaf not found: ID does not exist" containerID="b074a9cacd516fcdace4d9536649b0a675c2a8cc82436bee1033fe1fe62bfaaf" Sep 30 13:04:17 crc kubenswrapper[5002]: I0930 13:04:17.729435 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b074a9cacd516fcdace4d9536649b0a675c2a8cc82436bee1033fe1fe62bfaaf"} err="failed to get container status \"b074a9cacd516fcdace4d9536649b0a675c2a8cc82436bee1033fe1fe62bfaaf\": rpc error: code = NotFound desc = could not find container \"b074a9cacd516fcdace4d9536649b0a675c2a8cc82436bee1033fe1fe62bfaaf\": container with ID starting with b074a9cacd516fcdace4d9536649b0a675c2a8cc82436bee1033fe1fe62bfaaf not found: ID does not exist" Sep 30 13:04:18 crc kubenswrapper[5002]: I0930 13:04:18.688650 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="378fdb00-09a8-45f6-8c30-a25fa2f3b527" path="/var/lib/kubelet/pods/378fdb00-09a8-45f6-8c30-a25fa2f3b527/volumes" Sep 30 13:04:32 crc kubenswrapper[5002]: I0930 13:04:32.098829 5002 patch_prober.go:28] interesting pod/machine-config-daemon-ncbb5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 
Sep 30 13:04:32 crc kubenswrapper[5002]: I0930 13:04:32.099369 5002 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" podUID="341a55c6-78d3-4fa2-8f47-b56fd41fa1c1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Sep 30 13:04:33 crc kubenswrapper[5002]: I0930 13:04:33.486466 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/tempest-tests-tempest"]
Sep 30 13:04:33 crc kubenswrapper[5002]: E0930 13:04:33.487776 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="378fdb00-09a8-45f6-8c30-a25fa2f3b527" containerName="registry-server"
Sep 30 13:04:33 crc kubenswrapper[5002]: I0930 13:04:33.487801 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="378fdb00-09a8-45f6-8c30-a25fa2f3b527" containerName="registry-server"
Sep 30 13:04:33 crc kubenswrapper[5002]: E0930 13:04:33.487844 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="378fdb00-09a8-45f6-8c30-a25fa2f3b527" containerName="extract-utilities"
Sep 30 13:04:33 crc kubenswrapper[5002]: I0930 13:04:33.487859 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="378fdb00-09a8-45f6-8c30-a25fa2f3b527" containerName="extract-utilities"
Sep 30 13:04:33 crc kubenswrapper[5002]: E0930 13:04:33.487889 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="378fdb00-09a8-45f6-8c30-a25fa2f3b527" containerName="extract-content"
Sep 30 13:04:33 crc kubenswrapper[5002]: I0930 13:04:33.487901 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="378fdb00-09a8-45f6-8c30-a25fa2f3b527" containerName="extract-content"
Sep 30 13:04:33 crc kubenswrapper[5002]: I0930 13:04:33.488265 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="378fdb00-09a8-45f6-8c30-a25fa2f3b527" containerName="registry-server"
Sep 30 13:04:33 crc kubenswrapper[5002]: I0930 13:04:33.489276 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/tempest-tests-tempest"
Sep 30 13:04:33 crc kubenswrapper[5002]: I0930 13:04:33.491566 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"test-operator-controller-priv-key"
Sep 30 13:04:33 crc kubenswrapper[5002]: I0930 13:04:33.491602 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"tempest-tests-tempest-env-vars-s0"
Sep 30 13:04:33 crc kubenswrapper[5002]: I0930 13:04:33.493120 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"default-dockercfg-xltdh"
Sep 30 13:04:33 crc kubenswrapper[5002]: I0930 13:04:33.494816 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/tempest-tests-tempest"]
Sep 30 13:04:33 crc kubenswrapper[5002]: I0930 13:04:33.505921 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"tempest-tests-tempest-custom-data-s0"
Sep 30 13:04:33 crc kubenswrapper[5002]: I0930 13:04:33.544711 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"tempest-tests-tempest\" (UID: \"32bc4a2b-b531-4126-8920-ec50156dc863\") " pod="openstack/tempest-tests-tempest"
Sep 30 13:04:33 crc kubenswrapper[5002]: I0930 13:04:33.544782 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/32bc4a2b-b531-4126-8920-ec50156dc863-ca-certs\") pod \"tempest-tests-tempest\" (UID: \"32bc4a2b-b531-4126-8920-ec50156dc863\") " pod="openstack/tempest-tests-tempest"
Sep 30 13:04:33 crc kubenswrapper[5002]: I0930 13:04:33.544808 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/32bc4a2b-b531-4126-8920-ec50156dc863-ssh-key\") pod \"tempest-tests-tempest\" (UID: \"32bc4a2b-b531-4126-8920-ec50156dc863\") " pod="openstack/tempest-tests-tempest"
Sep 30 13:04:33 crc kubenswrapper[5002]: I0930 13:04:33.544850 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/32bc4a2b-b531-4126-8920-ec50156dc863-test-operator-ephemeral-workdir\") pod \"tempest-tests-tempest\" (UID: \"32bc4a2b-b531-4126-8920-ec50156dc863\") " pod="openstack/tempest-tests-tempest"
Sep 30 13:04:33 crc kubenswrapper[5002]: I0930 13:04:33.544890 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/32bc4a2b-b531-4126-8920-ec50156dc863-test-operator-ephemeral-temporary\") pod \"tempest-tests-tempest\" (UID: \"32bc4a2b-b531-4126-8920-ec50156dc863\") " pod="openstack/tempest-tests-tempest"
Sep 30 13:04:33 crc kubenswrapper[5002]: I0930 13:04:33.544917 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/32bc4a2b-b531-4126-8920-ec50156dc863-openstack-config-secret\") pod \"tempest-tests-tempest\" (UID: \"32bc4a2b-b531-4126-8920-ec50156dc863\") " pod="openstack/tempest-tests-tempest"
Sep 30 13:04:33 crc kubenswrapper[5002]: I0930 13:04:33.545047 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/32bc4a2b-b531-4126-8920-ec50156dc863-openstack-config\") pod \"tempest-tests-tempest\" (UID: \"32bc4a2b-b531-4126-8920-ec50156dc863\") " pod="openstack/tempest-tests-tempest"
\"kubernetes.io/configmap/32bc4a2b-b531-4126-8920-ec50156dc863-openstack-config\") pod \"tempest-tests-tempest\" (UID: \"32bc4a2b-b531-4126-8920-ec50156dc863\") " pod="openstack/tempest-tests-tempest" Sep 30 13:04:33 crc kubenswrapper[5002]: I0930 13:04:33.545085 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dccr4\" (UniqueName: \"kubernetes.io/projected/32bc4a2b-b531-4126-8920-ec50156dc863-kube-api-access-dccr4\") pod \"tempest-tests-tempest\" (UID: \"32bc4a2b-b531-4126-8920-ec50156dc863\") " pod="openstack/tempest-tests-tempest" Sep 30 13:04:33 crc kubenswrapper[5002]: I0930 13:04:33.545110 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/32bc4a2b-b531-4126-8920-ec50156dc863-config-data\") pod \"tempest-tests-tempest\" (UID: \"32bc4a2b-b531-4126-8920-ec50156dc863\") " pod="openstack/tempest-tests-tempest" Sep 30 13:04:33 crc kubenswrapper[5002]: I0930 13:04:33.646777 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/32bc4a2b-b531-4126-8920-ec50156dc863-openstack-config\") pod \"tempest-tests-tempest\" (UID: \"32bc4a2b-b531-4126-8920-ec50156dc863\") " pod="openstack/tempest-tests-tempest" Sep 30 13:04:33 crc kubenswrapper[5002]: I0930 13:04:33.646849 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dccr4\" (UniqueName: \"kubernetes.io/projected/32bc4a2b-b531-4126-8920-ec50156dc863-kube-api-access-dccr4\") pod \"tempest-tests-tempest\" (UID: \"32bc4a2b-b531-4126-8920-ec50156dc863\") " pod="openstack/tempest-tests-tempest" Sep 30 13:04:33 crc kubenswrapper[5002]: I0930 13:04:33.646877 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/32bc4a2b-b531-4126-8920-ec50156dc863-config-data\") pod \"tempest-tests-tempest\" (UID: \"32bc4a2b-b531-4126-8920-ec50156dc863\") " pod="openstack/tempest-tests-tempest" Sep 30 13:04:33 crc kubenswrapper[5002]: I0930 13:04:33.646936 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"tempest-tests-tempest\" (UID: \"32bc4a2b-b531-4126-8920-ec50156dc863\") " pod="openstack/tempest-tests-tempest" Sep 30 13:04:33 crc kubenswrapper[5002]: I0930 13:04:33.646990 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/32bc4a2b-b531-4126-8920-ec50156dc863-ca-certs\") pod \"tempest-tests-tempest\" (UID: \"32bc4a2b-b531-4126-8920-ec50156dc863\") " pod="openstack/tempest-tests-tempest" Sep 30 13:04:33 crc kubenswrapper[5002]: I0930 13:04:33.647020 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/32bc4a2b-b531-4126-8920-ec50156dc863-ssh-key\") pod \"tempest-tests-tempest\" (UID: \"32bc4a2b-b531-4126-8920-ec50156dc863\") " pod="openstack/tempest-tests-tempest" Sep 30 13:04:33 crc kubenswrapper[5002]: I0930 13:04:33.647065 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/32bc4a2b-b531-4126-8920-ec50156dc863-test-operator-ephemeral-workdir\") pod \"tempest-tests-tempest\" (UID: 
\"32bc4a2b-b531-4126-8920-ec50156dc863\") " pod="openstack/tempest-tests-tempest" Sep 30 13:04:33 crc kubenswrapper[5002]: I0930 13:04:33.647101 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/32bc4a2b-b531-4126-8920-ec50156dc863-test-operator-ephemeral-temporary\") pod \"tempest-tests-tempest\" (UID: \"32bc4a2b-b531-4126-8920-ec50156dc863\") " pod="openstack/tempest-tests-tempest" Sep 30 13:04:33 crc kubenswrapper[5002]: I0930 13:04:33.647219 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/32bc4a2b-b531-4126-8920-ec50156dc863-openstack-config-secret\") pod \"tempest-tests-tempest\" (UID: \"32bc4a2b-b531-4126-8920-ec50156dc863\") " pod="openstack/tempest-tests-tempest" Sep 30 13:04:33 crc kubenswrapper[5002]: I0930 13:04:33.647412 5002 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"tempest-tests-tempest\" (UID: \"32bc4a2b-b531-4126-8920-ec50156dc863\") device mount path \"/mnt/openstack/pv09\"" pod="openstack/tempest-tests-tempest" Sep 30 13:04:33 crc kubenswrapper[5002]: I0930 13:04:33.647710 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/32bc4a2b-b531-4126-8920-ec50156dc863-test-operator-ephemeral-workdir\") pod \"tempest-tests-tempest\" (UID: \"32bc4a2b-b531-4126-8920-ec50156dc863\") " pod="openstack/tempest-tests-tempest" Sep 30 13:04:33 crc kubenswrapper[5002]: I0930 13:04:33.647743 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/32bc4a2b-b531-4126-8920-ec50156dc863-test-operator-ephemeral-temporary\") pod \"tempest-tests-tempest\" (UID: \"32bc4a2b-b531-4126-8920-ec50156dc863\") " pod="openstack/tempest-tests-tempest" Sep 30 13:04:33 crc kubenswrapper[5002]: I0930 13:04:33.647834 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/32bc4a2b-b531-4126-8920-ec50156dc863-openstack-config\") pod \"tempest-tests-tempest\" (UID: \"32bc4a2b-b531-4126-8920-ec50156dc863\") " pod="openstack/tempest-tests-tempest" Sep 30 13:04:33 crc kubenswrapper[5002]: I0930 13:04:33.648580 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/32bc4a2b-b531-4126-8920-ec50156dc863-config-data\") pod \"tempest-tests-tempest\" (UID: \"32bc4a2b-b531-4126-8920-ec50156dc863\") " pod="openstack/tempest-tests-tempest" Sep 30 13:04:33 crc kubenswrapper[5002]: I0930 13:04:33.653704 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/32bc4a2b-b531-4126-8920-ec50156dc863-ca-certs\") pod \"tempest-tests-tempest\" (UID: \"32bc4a2b-b531-4126-8920-ec50156dc863\") " pod="openstack/tempest-tests-tempest" Sep 30 13:04:33 crc kubenswrapper[5002]: I0930 13:04:33.654295 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/32bc4a2b-b531-4126-8920-ec50156dc863-ssh-key\") pod \"tempest-tests-tempest\" (UID: \"32bc4a2b-b531-4126-8920-ec50156dc863\") " pod="openstack/tempest-tests-tempest" Sep 30 13:04:33 crc 
kubenswrapper[5002]: I0930 13:04:33.655160 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/32bc4a2b-b531-4126-8920-ec50156dc863-openstack-config-secret\") pod \"tempest-tests-tempest\" (UID: \"32bc4a2b-b531-4126-8920-ec50156dc863\") " pod="openstack/tempest-tests-tempest" Sep 30 13:04:33 crc kubenswrapper[5002]: I0930 13:04:33.676823 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"tempest-tests-tempest\" (UID: \"32bc4a2b-b531-4126-8920-ec50156dc863\") " pod="openstack/tempest-tests-tempest" Sep 30 13:04:33 crc kubenswrapper[5002]: I0930 13:04:33.676960 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dccr4\" (UniqueName: \"kubernetes.io/projected/32bc4a2b-b531-4126-8920-ec50156dc863-kube-api-access-dccr4\") pod \"tempest-tests-tempest\" (UID: \"32bc4a2b-b531-4126-8920-ec50156dc863\") " pod="openstack/tempest-tests-tempest" Sep 30 13:04:33 crc kubenswrapper[5002]: I0930 13:04:33.823875 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/tempest-tests-tempest" Sep 30 13:04:34 crc kubenswrapper[5002]: I0930 13:04:34.308811 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/tempest-tests-tempest"] Sep 30 13:04:34 crc kubenswrapper[5002]: W0930 13:04:34.314592 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod32bc4a2b_b531_4126_8920_ec50156dc863.slice/crio-dd342fbb4d502066a3601f484d61c6b2664a704c1566a169f763bbae6df328d4 WatchSource:0}: Error finding container dd342fbb4d502066a3601f484d61c6b2664a704c1566a169f763bbae6df328d4: Status 404 returned error can't find the container with id dd342fbb4d502066a3601f484d61c6b2664a704c1566a169f763bbae6df328d4 Sep 30 13:04:34 crc kubenswrapper[5002]: I0930 13:04:34.779448 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"32bc4a2b-b531-4126-8920-ec50156dc863","Type":"ContainerStarted","Data":"dd342fbb4d502066a3601f484d61c6b2664a704c1566a169f763bbae6df328d4"} Sep 30 13:05:02 crc kubenswrapper[5002]: I0930 13:05:02.098896 5002 patch_prober.go:28] interesting pod/machine-config-daemon-ncbb5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 13:05:02 crc kubenswrapper[5002]: I0930 13:05:02.099543 5002 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" podUID="341a55c6-78d3-4fa2-8f47-b56fd41fa1c1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 13:05:02 crc kubenswrapper[5002]: I0930 13:05:02.099605 5002 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" Sep 30 13:05:02 crc kubenswrapper[5002]: I0930 13:05:02.100772 5002 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"2171ea1d3e69548aa803f0251f328d7b726c5d0d03df99465807718d28b0c056"} 
pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 30 13:05:02 crc kubenswrapper[5002]: I0930 13:05:02.100852 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" podUID="341a55c6-78d3-4fa2-8f47-b56fd41fa1c1" containerName="machine-config-daemon" containerID="cri-o://2171ea1d3e69548aa803f0251f328d7b726c5d0d03df99465807718d28b0c056" gracePeriod=600 Sep 30 13:05:02 crc kubenswrapper[5002]: E0930 13:05:02.625135 5002 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-tempest-all:current-podified" Sep 30 13:05:02 crc kubenswrapper[5002]: E0930 13:05:02.625757 5002 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:tempest-tests-tempest-tests-runner,Image:quay.io/podified-antelope-centos9/openstack-tempest-all:current-podified,Command:[],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:test-operator-ephemeral-workdir,ReadOnly:false,MountPath:/var/lib/tempest,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:test-operator-ephemeral-temporary,ReadOnly:false,MountPath:/tmp,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:false,MountPath:/etc/test_operator,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:test-operator-logs,ReadOnly:false,MountPath:/var/lib/tempest/external_files,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:openstack-config,ReadOnly:true,MountPath:/etc/openstack/clouds.yaml,SubPath:clouds.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:openstack-config,ReadOnly:true,MountPath:/var/lib/tempest/.config/openstack/clouds.yaml,SubPath:clouds.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:openstack-config-secret,ReadOnly:false,MountPath:/etc/openstack/secure.yaml,SubPath:secure.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ca-certs,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ssh-key,ReadOnly:false,MountPath:/var/lib/tempest/id_ecdsa,SubPath:ssh_key,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-dccr4,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42480,RunAsNonRoot:*false,ReadOnlyRootFilesystem:*false,AllowPrivilegeEscalation:*true,RunAsGroup:*42480,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{EnvFromSource{Prefix:,ConfigMapRef:&ConfigMapEnvSource{LocalObjectReference:LocalObjectRefere
nce{Name:tempest-tests-tempest-custom-data-s0,},Optional:nil,},SecretRef:nil,},EnvFromSource{Prefix:,ConfigMapRef:&ConfigMapEnvSource{LocalObjectReference:LocalObjectReference{Name:tempest-tests-tempest-env-vars-s0,},Optional:nil,},SecretRef:nil,},},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod tempest-tests-tempest_openstack(32bc4a2b-b531-4126-8920-ec50156dc863): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Sep 30 13:05:02 crc kubenswrapper[5002]: E0930 13:05:02.628777 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"tempest-tests-tempest-tests-runner\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/tempest-tests-tempest" podUID="32bc4a2b-b531-4126-8920-ec50156dc863" Sep 30 13:05:03 crc kubenswrapper[5002]: I0930 13:05:03.058706 5002 generic.go:334] "Generic (PLEG): container finished" podID="341a55c6-78d3-4fa2-8f47-b56fd41fa1c1" containerID="2171ea1d3e69548aa803f0251f328d7b726c5d0d03df99465807718d28b0c056" exitCode=0 Sep 30 13:05:03 crc kubenswrapper[5002]: I0930 13:05:03.059690 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" event={"ID":"341a55c6-78d3-4fa2-8f47-b56fd41fa1c1","Type":"ContainerDied","Data":"2171ea1d3e69548aa803f0251f328d7b726c5d0d03df99465807718d28b0c056"} Sep 30 13:05:03 crc kubenswrapper[5002]: I0930 13:05:03.059723 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" event={"ID":"341a55c6-78d3-4fa2-8f47-b56fd41fa1c1","Type":"ContainerStarted","Data":"1220b4b7f62978ac482c2e64e77e405623ac305ba80fca6277deaf50597c3a98"} Sep 30 13:05:03 crc kubenswrapper[5002]: I0930 13:05:03.059738 5002 scope.go:117] "RemoveContainer" containerID="c399d44e9bfc45838bcb7d385b7879008ca1a51ef33744b6ce2235833eb197e0" Sep 30 13:05:03 crc kubenswrapper[5002]: E0930 13:05:03.061950 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"tempest-tests-tempest-tests-runner\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-tempest-all:current-podified\\\"\"" pod="openstack/tempest-tests-tempest" podUID="32bc4a2b-b531-4126-8920-ec50156dc863" Sep 30 13:05:19 crc kubenswrapper[5002]: I0930 13:05:19.228564 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"32bc4a2b-b531-4126-8920-ec50156dc863","Type":"ContainerStarted","Data":"bdd0ec470ba32d08cb4315ac5d534d320832f8a15a8be87b1b855f9a988aa604"} Sep 30 13:05:19 crc kubenswrapper[5002]: I0930 13:05:19.253158 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/tempest-tests-tempest" podStartSLOduration=3.803390313 podStartE2EDuration="47.253138071s" podCreationTimestamp="2025-09-30 13:04:32 +0000 UTC" firstStartedPulling="2025-09-30 13:04:34.31839457 +0000 UTC m=+2648.568076726" lastFinishedPulling="2025-09-30 13:05:17.768142308 +0000 UTC m=+2692.017824484" observedRunningTime="2025-09-30 13:05:19.247039454 +0000 UTC m=+2693.496721600" watchObservedRunningTime="2025-09-30 13:05:19.253138071 +0000 UTC m=+2693.502820217" Sep 30 13:06:06 crc kubenswrapper[5002]: I0930 13:06:06.784425 5002 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openshift-marketplace/certified-operators-mh7cn"] Sep 30 13:06:06 crc kubenswrapper[5002]: I0930 13:06:06.796252 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-mh7cn"] Sep 30 13:06:06 crc kubenswrapper[5002]: I0930 13:06:06.796363 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-mh7cn" Sep 30 13:06:06 crc kubenswrapper[5002]: I0930 13:06:06.881422 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b9gp4\" (UniqueName: \"kubernetes.io/projected/b5658aba-c030-416d-86c6-1708b6854862-kube-api-access-b9gp4\") pod \"certified-operators-mh7cn\" (UID: \"b5658aba-c030-416d-86c6-1708b6854862\") " pod="openshift-marketplace/certified-operators-mh7cn" Sep 30 13:06:06 crc kubenswrapper[5002]: I0930 13:06:06.881530 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b5658aba-c030-416d-86c6-1708b6854862-catalog-content\") pod \"certified-operators-mh7cn\" (UID: \"b5658aba-c030-416d-86c6-1708b6854862\") " pod="openshift-marketplace/certified-operators-mh7cn" Sep 30 13:06:06 crc kubenswrapper[5002]: I0930 13:06:06.881669 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b5658aba-c030-416d-86c6-1708b6854862-utilities\") pod \"certified-operators-mh7cn\" (UID: \"b5658aba-c030-416d-86c6-1708b6854862\") " pod="openshift-marketplace/certified-operators-mh7cn" Sep 30 13:06:06 crc kubenswrapper[5002]: I0930 13:06:06.983292 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b9gp4\" (UniqueName: \"kubernetes.io/projected/b5658aba-c030-416d-86c6-1708b6854862-kube-api-access-b9gp4\") pod \"certified-operators-mh7cn\" (UID: \"b5658aba-c030-416d-86c6-1708b6854862\") " pod="openshift-marketplace/certified-operators-mh7cn" Sep 30 13:06:06 crc kubenswrapper[5002]: I0930 13:06:06.983366 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b5658aba-c030-416d-86c6-1708b6854862-catalog-content\") pod \"certified-operators-mh7cn\" (UID: \"b5658aba-c030-416d-86c6-1708b6854862\") " pod="openshift-marketplace/certified-operators-mh7cn" Sep 30 13:06:06 crc kubenswrapper[5002]: I0930 13:06:06.983451 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b5658aba-c030-416d-86c6-1708b6854862-utilities\") pod \"certified-operators-mh7cn\" (UID: \"b5658aba-c030-416d-86c6-1708b6854862\") " pod="openshift-marketplace/certified-operators-mh7cn" Sep 30 13:06:06 crc kubenswrapper[5002]: I0930 13:06:06.983897 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b5658aba-c030-416d-86c6-1708b6854862-utilities\") pod \"certified-operators-mh7cn\" (UID: \"b5658aba-c030-416d-86c6-1708b6854862\") " pod="openshift-marketplace/certified-operators-mh7cn" Sep 30 13:06:06 crc kubenswrapper[5002]: I0930 13:06:06.984011 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b5658aba-c030-416d-86c6-1708b6854862-catalog-content\") pod \"certified-operators-mh7cn\" (UID: 
\"b5658aba-c030-416d-86c6-1708b6854862\") " pod="openshift-marketplace/certified-operators-mh7cn" Sep 30 13:06:07 crc kubenswrapper[5002]: I0930 13:06:07.010903 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b9gp4\" (UniqueName: \"kubernetes.io/projected/b5658aba-c030-416d-86c6-1708b6854862-kube-api-access-b9gp4\") pod \"certified-operators-mh7cn\" (UID: \"b5658aba-c030-416d-86c6-1708b6854862\") " pod="openshift-marketplace/certified-operators-mh7cn" Sep 30 13:06:07 crc kubenswrapper[5002]: I0930 13:06:07.113307 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-mh7cn" Sep 30 13:06:07 crc kubenswrapper[5002]: I0930 13:06:07.605521 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-mh7cn"] Sep 30 13:06:07 crc kubenswrapper[5002]: W0930 13:06:07.610462 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb5658aba_c030_416d_86c6_1708b6854862.slice/crio-5c7ccb561537a30f25bddd18741b62e20aaff806fde5dc56cbde3406e90e47a3 WatchSource:0}: Error finding container 5c7ccb561537a30f25bddd18741b62e20aaff806fde5dc56cbde3406e90e47a3: Status 404 returned error can't find the container with id 5c7ccb561537a30f25bddd18741b62e20aaff806fde5dc56cbde3406e90e47a3 Sep 30 13:06:07 crc kubenswrapper[5002]: I0930 13:06:07.783179 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mh7cn" event={"ID":"b5658aba-c030-416d-86c6-1708b6854862","Type":"ContainerStarted","Data":"5c7ccb561537a30f25bddd18741b62e20aaff806fde5dc56cbde3406e90e47a3"} Sep 30 13:06:08 crc kubenswrapper[5002]: I0930 13:06:08.195652 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-rqfbc"] Sep 30 13:06:08 crc kubenswrapper[5002]: I0930 13:06:08.197727 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-rqfbc" Sep 30 13:06:08 crc kubenswrapper[5002]: I0930 13:06:08.214270 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-rqfbc"] Sep 30 13:06:08 crc kubenswrapper[5002]: I0930 13:06:08.306207 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d184b411-2932-4ace-a20a-b81cdfec713a-catalog-content\") pod \"community-operators-rqfbc\" (UID: \"d184b411-2932-4ace-a20a-b81cdfec713a\") " pod="openshift-marketplace/community-operators-rqfbc" Sep 30 13:06:08 crc kubenswrapper[5002]: I0930 13:06:08.306323 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d184b411-2932-4ace-a20a-b81cdfec713a-utilities\") pod \"community-operators-rqfbc\" (UID: \"d184b411-2932-4ace-a20a-b81cdfec713a\") " pod="openshift-marketplace/community-operators-rqfbc" Sep 30 13:06:08 crc kubenswrapper[5002]: I0930 13:06:08.306418 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2w7mf\" (UniqueName: \"kubernetes.io/projected/d184b411-2932-4ace-a20a-b81cdfec713a-kube-api-access-2w7mf\") pod \"community-operators-rqfbc\" (UID: \"d184b411-2932-4ace-a20a-b81cdfec713a\") " pod="openshift-marketplace/community-operators-rqfbc" Sep 30 13:06:08 crc kubenswrapper[5002]: I0930 13:06:08.407994 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2w7mf\" (UniqueName: \"kubernetes.io/projected/d184b411-2932-4ace-a20a-b81cdfec713a-kube-api-access-2w7mf\") pod \"community-operators-rqfbc\" (UID: \"d184b411-2932-4ace-a20a-b81cdfec713a\") " pod="openshift-marketplace/community-operators-rqfbc" Sep 30 13:06:08 crc kubenswrapper[5002]: I0930 13:06:08.408079 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d184b411-2932-4ace-a20a-b81cdfec713a-catalog-content\") pod \"community-operators-rqfbc\" (UID: \"d184b411-2932-4ace-a20a-b81cdfec713a\") " pod="openshift-marketplace/community-operators-rqfbc" Sep 30 13:06:08 crc kubenswrapper[5002]: I0930 13:06:08.408186 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d184b411-2932-4ace-a20a-b81cdfec713a-utilities\") pod \"community-operators-rqfbc\" (UID: \"d184b411-2932-4ace-a20a-b81cdfec713a\") " pod="openshift-marketplace/community-operators-rqfbc" Sep 30 13:06:08 crc kubenswrapper[5002]: I0930 13:06:08.408709 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d184b411-2932-4ace-a20a-b81cdfec713a-catalog-content\") pod \"community-operators-rqfbc\" (UID: \"d184b411-2932-4ace-a20a-b81cdfec713a\") " pod="openshift-marketplace/community-operators-rqfbc" Sep 30 13:06:08 crc kubenswrapper[5002]: I0930 13:06:08.408715 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d184b411-2932-4ace-a20a-b81cdfec713a-utilities\") pod \"community-operators-rqfbc\" (UID: \"d184b411-2932-4ace-a20a-b81cdfec713a\") " pod="openshift-marketplace/community-operators-rqfbc" Sep 30 13:06:08 crc kubenswrapper[5002]: I0930 13:06:08.427555 5002 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-2w7mf\" (UniqueName: \"kubernetes.io/projected/d184b411-2932-4ace-a20a-b81cdfec713a-kube-api-access-2w7mf\") pod \"community-operators-rqfbc\" (UID: \"d184b411-2932-4ace-a20a-b81cdfec713a\") " pod="openshift-marketplace/community-operators-rqfbc" Sep 30 13:06:08 crc kubenswrapper[5002]: I0930 13:06:08.519103 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-rqfbc" Sep 30 13:06:08 crc kubenswrapper[5002]: I0930 13:06:08.792378 5002 generic.go:334] "Generic (PLEG): container finished" podID="b5658aba-c030-416d-86c6-1708b6854862" containerID="0c6d9f69b02980151fbfd4349d1a3c8ba41dd70971c34be8e2e90eee4385ef30" exitCode=0 Sep 30 13:06:08 crc kubenswrapper[5002]: I0930 13:06:08.792424 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mh7cn" event={"ID":"b5658aba-c030-416d-86c6-1708b6854862","Type":"ContainerDied","Data":"0c6d9f69b02980151fbfd4349d1a3c8ba41dd70971c34be8e2e90eee4385ef30"} Sep 30 13:06:09 crc kubenswrapper[5002]: I0930 13:06:09.005019 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-rqfbc"] Sep 30 13:06:09 crc kubenswrapper[5002]: W0930 13:06:09.015930 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd184b411_2932_4ace_a20a_b81cdfec713a.slice/crio-23dbc67d360f91111b2c5747adc5e65f1823e28d150341dcb01b9687a383f9f7 WatchSource:0}: Error finding container 23dbc67d360f91111b2c5747adc5e65f1823e28d150341dcb01b9687a383f9f7: Status 404 returned error can't find the container with id 23dbc67d360f91111b2c5747adc5e65f1823e28d150341dcb01b9687a383f9f7 Sep 30 13:06:09 crc kubenswrapper[5002]: I0930 13:06:09.802774 5002 generic.go:334] "Generic (PLEG): container finished" podID="d184b411-2932-4ace-a20a-b81cdfec713a" containerID="186159855c8ad5720e6196fd5586c387e213d70018b5d5968cbb07b402b5aa95" exitCode=0 Sep 30 13:06:09 crc kubenswrapper[5002]: I0930 13:06:09.803107 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rqfbc" event={"ID":"d184b411-2932-4ace-a20a-b81cdfec713a","Type":"ContainerDied","Data":"186159855c8ad5720e6196fd5586c387e213d70018b5d5968cbb07b402b5aa95"} Sep 30 13:06:09 crc kubenswrapper[5002]: I0930 13:06:09.805123 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rqfbc" event={"ID":"d184b411-2932-4ace-a20a-b81cdfec713a","Type":"ContainerStarted","Data":"23dbc67d360f91111b2c5747adc5e65f1823e28d150341dcb01b9687a383f9f7"} Sep 30 13:06:09 crc kubenswrapper[5002]: I0930 13:06:09.811082 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mh7cn" event={"ID":"b5658aba-c030-416d-86c6-1708b6854862","Type":"ContainerStarted","Data":"73b2028c2527e243c80157a58edca433ddfb09646851e23404feeb7fce249c6b"} Sep 30 13:06:10 crc kubenswrapper[5002]: I0930 13:06:10.822868 5002 generic.go:334] "Generic (PLEG): container finished" podID="b5658aba-c030-416d-86c6-1708b6854862" containerID="73b2028c2527e243c80157a58edca433ddfb09646851e23404feeb7fce249c6b" exitCode=0 Sep 30 13:06:10 crc kubenswrapper[5002]: I0930 13:06:10.822958 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mh7cn" 
event={"ID":"b5658aba-c030-416d-86c6-1708b6854862","Type":"ContainerDied","Data":"73b2028c2527e243c80157a58edca433ddfb09646851e23404feeb7fce249c6b"} Sep 30 13:06:11 crc kubenswrapper[5002]: I0930 13:06:11.580546 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-fbswt"] Sep 30 13:06:11 crc kubenswrapper[5002]: I0930 13:06:11.583550 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-fbswt" Sep 30 13:06:11 crc kubenswrapper[5002]: I0930 13:06:11.605489 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-fbswt"] Sep 30 13:06:11 crc kubenswrapper[5002]: I0930 13:06:11.680099 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c5eedd99-919b-4ab1-96e4-468fffd6bd2f-utilities\") pod \"redhat-marketplace-fbswt\" (UID: \"c5eedd99-919b-4ab1-96e4-468fffd6bd2f\") " pod="openshift-marketplace/redhat-marketplace-fbswt" Sep 30 13:06:11 crc kubenswrapper[5002]: I0930 13:06:11.680340 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c5eedd99-919b-4ab1-96e4-468fffd6bd2f-catalog-content\") pod \"redhat-marketplace-fbswt\" (UID: \"c5eedd99-919b-4ab1-96e4-468fffd6bd2f\") " pod="openshift-marketplace/redhat-marketplace-fbswt" Sep 30 13:06:11 crc kubenswrapper[5002]: I0930 13:06:11.680376 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qt2md\" (UniqueName: \"kubernetes.io/projected/c5eedd99-919b-4ab1-96e4-468fffd6bd2f-kube-api-access-qt2md\") pod \"redhat-marketplace-fbswt\" (UID: \"c5eedd99-919b-4ab1-96e4-468fffd6bd2f\") " pod="openshift-marketplace/redhat-marketplace-fbswt" Sep 30 13:06:11 crc kubenswrapper[5002]: I0930 13:06:11.782529 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c5eedd99-919b-4ab1-96e4-468fffd6bd2f-catalog-content\") pod \"redhat-marketplace-fbswt\" (UID: \"c5eedd99-919b-4ab1-96e4-468fffd6bd2f\") " pod="openshift-marketplace/redhat-marketplace-fbswt" Sep 30 13:06:11 crc kubenswrapper[5002]: I0930 13:06:11.782600 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qt2md\" (UniqueName: \"kubernetes.io/projected/c5eedd99-919b-4ab1-96e4-468fffd6bd2f-kube-api-access-qt2md\") pod \"redhat-marketplace-fbswt\" (UID: \"c5eedd99-919b-4ab1-96e4-468fffd6bd2f\") " pod="openshift-marketplace/redhat-marketplace-fbswt" Sep 30 13:06:11 crc kubenswrapper[5002]: I0930 13:06:11.782676 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c5eedd99-919b-4ab1-96e4-468fffd6bd2f-utilities\") pod \"redhat-marketplace-fbswt\" (UID: \"c5eedd99-919b-4ab1-96e4-468fffd6bd2f\") " pod="openshift-marketplace/redhat-marketplace-fbswt" Sep 30 13:06:11 crc kubenswrapper[5002]: I0930 13:06:11.783200 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c5eedd99-919b-4ab1-96e4-468fffd6bd2f-utilities\") pod \"redhat-marketplace-fbswt\" (UID: \"c5eedd99-919b-4ab1-96e4-468fffd6bd2f\") " pod="openshift-marketplace/redhat-marketplace-fbswt" Sep 30 13:06:11 crc kubenswrapper[5002]: I0930 13:06:11.783273 5002 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c5eedd99-919b-4ab1-96e4-468fffd6bd2f-catalog-content\") pod \"redhat-marketplace-fbswt\" (UID: \"c5eedd99-919b-4ab1-96e4-468fffd6bd2f\") " pod="openshift-marketplace/redhat-marketplace-fbswt" Sep 30 13:06:11 crc kubenswrapper[5002]: I0930 13:06:11.830747 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qt2md\" (UniqueName: \"kubernetes.io/projected/c5eedd99-919b-4ab1-96e4-468fffd6bd2f-kube-api-access-qt2md\") pod \"redhat-marketplace-fbswt\" (UID: \"c5eedd99-919b-4ab1-96e4-468fffd6bd2f\") " pod="openshift-marketplace/redhat-marketplace-fbswt" Sep 30 13:06:11 crc kubenswrapper[5002]: I0930 13:06:11.864836 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mh7cn" event={"ID":"b5658aba-c030-416d-86c6-1708b6854862","Type":"ContainerStarted","Data":"c67bc6e4f03f0b96438b9b318af94ebe9c720a9a86fd743cfe85f99f8da6305f"} Sep 30 13:06:11 crc kubenswrapper[5002]: I0930 13:06:11.880132 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-mh7cn" podStartSLOduration=3.071603557 podStartE2EDuration="5.880115505s" podCreationTimestamp="2025-09-30 13:06:06 +0000 UTC" firstStartedPulling="2025-09-30 13:06:08.794198417 +0000 UTC m=+2743.043880563" lastFinishedPulling="2025-09-30 13:06:11.602710365 +0000 UTC m=+2745.852392511" observedRunningTime="2025-09-30 13:06:11.877411425 +0000 UTC m=+2746.127093571" watchObservedRunningTime="2025-09-30 13:06:11.880115505 +0000 UTC m=+2746.129797651" Sep 30 13:06:11 crc kubenswrapper[5002]: I0930 13:06:11.914841 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-fbswt" Sep 30 13:06:12 crc kubenswrapper[5002]: I0930 13:06:12.452493 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-fbswt"] Sep 30 13:06:12 crc kubenswrapper[5002]: W0930 13:06:12.468111 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc5eedd99_919b_4ab1_96e4_468fffd6bd2f.slice/crio-b3257877138a9fd1ddef7c6b8195cf038a5e3cee3c64647a70a6e2d1aaaca266 WatchSource:0}: Error finding container b3257877138a9fd1ddef7c6b8195cf038a5e3cee3c64647a70a6e2d1aaaca266: Status 404 returned error can't find the container with id b3257877138a9fd1ddef7c6b8195cf038a5e3cee3c64647a70a6e2d1aaaca266 Sep 30 13:06:12 crc kubenswrapper[5002]: I0930 13:06:12.877422 5002 generic.go:334] "Generic (PLEG): container finished" podID="c5eedd99-919b-4ab1-96e4-468fffd6bd2f" containerID="b0f61acd5ab12e7b0ee24e3099b3e54460685043ab833ec5d7f45467b7e85c84" exitCode=0 Sep 30 13:06:12 crc kubenswrapper[5002]: I0930 13:06:12.877515 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-fbswt" event={"ID":"c5eedd99-919b-4ab1-96e4-468fffd6bd2f","Type":"ContainerDied","Data":"b0f61acd5ab12e7b0ee24e3099b3e54460685043ab833ec5d7f45467b7e85c84"} Sep 30 13:06:12 crc kubenswrapper[5002]: I0930 13:06:12.877804 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-fbswt" event={"ID":"c5eedd99-919b-4ab1-96e4-468fffd6bd2f","Type":"ContainerStarted","Data":"b3257877138a9fd1ddef7c6b8195cf038a5e3cee3c64647a70a6e2d1aaaca266"} Sep 30 13:06:14 crc kubenswrapper[5002]: I0930 13:06:14.898616 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rqfbc" event={"ID":"d184b411-2932-4ace-a20a-b81cdfec713a","Type":"ContainerStarted","Data":"76df807b87d06aff815339071cb9b4a6e77dee1d27a8330b1d040ab7d7f78908"} Sep 30 13:06:15 crc kubenswrapper[5002]: I0930 13:06:15.908567 5002 generic.go:334] "Generic (PLEG): container finished" podID="d184b411-2932-4ace-a20a-b81cdfec713a" containerID="76df807b87d06aff815339071cb9b4a6e77dee1d27a8330b1d040ab7d7f78908" exitCode=0 Sep 30 13:06:15 crc kubenswrapper[5002]: I0930 13:06:15.908678 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rqfbc" event={"ID":"d184b411-2932-4ace-a20a-b81cdfec713a","Type":"ContainerDied","Data":"76df807b87d06aff815339071cb9b4a6e77dee1d27a8330b1d040ab7d7f78908"} Sep 30 13:06:16 crc kubenswrapper[5002]: I0930 13:06:16.918579 5002 generic.go:334] "Generic (PLEG): container finished" podID="c5eedd99-919b-4ab1-96e4-468fffd6bd2f" containerID="4d09c0df386a3cc72794054e6d494d0f32c089952fa1e5a2a7f9d9dca876878c" exitCode=0 Sep 30 13:06:16 crc kubenswrapper[5002]: I0930 13:06:16.918624 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-fbswt" event={"ID":"c5eedd99-919b-4ab1-96e4-468fffd6bd2f","Type":"ContainerDied","Data":"4d09c0df386a3cc72794054e6d494d0f32c089952fa1e5a2a7f9d9dca876878c"} Sep 30 13:06:16 crc kubenswrapper[5002]: I0930 13:06:16.921766 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rqfbc" event={"ID":"d184b411-2932-4ace-a20a-b81cdfec713a","Type":"ContainerStarted","Data":"eb3caacb2ec539f1ab87ee199bf4ae3d2efb3e972d9c23871aa8a40bc480bdfa"} Sep 30 13:06:16 crc kubenswrapper[5002]: I0930 13:06:16.961813 
5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-rqfbc" podStartSLOduration=2.401704801 podStartE2EDuration="8.961793566s" podCreationTimestamp="2025-09-30 13:06:08 +0000 UTC" firstStartedPulling="2025-09-30 13:06:09.804583867 +0000 UTC m=+2744.054266023" lastFinishedPulling="2025-09-30 13:06:16.364672642 +0000 UTC m=+2750.614354788" observedRunningTime="2025-09-30 13:06:16.957938137 +0000 UTC m=+2751.207620303" watchObservedRunningTime="2025-09-30 13:06:16.961793566 +0000 UTC m=+2751.211475702" Sep 30 13:06:17 crc kubenswrapper[5002]: I0930 13:06:17.113851 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-mh7cn" Sep 30 13:06:17 crc kubenswrapper[5002]: I0930 13:06:17.114721 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-mh7cn" Sep 30 13:06:17 crc kubenswrapper[5002]: I0930 13:06:17.934953 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-fbswt" event={"ID":"c5eedd99-919b-4ab1-96e4-468fffd6bd2f","Type":"ContainerStarted","Data":"1f5ca53c252d0d3edfb010231a83ae32f1823358b68da57664fc220b3a0ca1e4"} Sep 30 13:06:17 crc kubenswrapper[5002]: I0930 13:06:17.961987 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-fbswt" podStartSLOduration=3.499697283 podStartE2EDuration="6.961974032s" podCreationTimestamp="2025-09-30 13:06:11 +0000 UTC" firstStartedPulling="2025-09-30 13:06:14.253742857 +0000 UTC m=+2748.503425003" lastFinishedPulling="2025-09-30 13:06:17.716019606 +0000 UTC m=+2751.965701752" observedRunningTime="2025-09-30 13:06:17.959916808 +0000 UTC m=+2752.209598954" watchObservedRunningTime="2025-09-30 13:06:17.961974032 +0000 UTC m=+2752.211656178" Sep 30 13:06:18 crc kubenswrapper[5002]: I0930 13:06:18.161848 5002 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/certified-operators-mh7cn" podUID="b5658aba-c030-416d-86c6-1708b6854862" containerName="registry-server" probeResult="failure" output=< Sep 30 13:06:18 crc kubenswrapper[5002]: timeout: failed to connect service ":50051" within 1s Sep 30 13:06:18 crc kubenswrapper[5002]: > Sep 30 13:06:18 crc kubenswrapper[5002]: I0930 13:06:18.519710 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-rqfbc" Sep 30 13:06:18 crc kubenswrapper[5002]: I0930 13:06:18.519771 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-rqfbc" Sep 30 13:06:18 crc kubenswrapper[5002]: I0930 13:06:18.586731 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-rqfbc" Sep 30 13:06:21 crc kubenswrapper[5002]: I0930 13:06:21.915904 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-fbswt" Sep 30 13:06:21 crc kubenswrapper[5002]: I0930 13:06:21.916193 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-fbswt" Sep 30 13:06:21 crc kubenswrapper[5002]: I0930 13:06:21.970860 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-fbswt" Sep 30 13:06:27 crc kubenswrapper[5002]: I0930 13:06:27.196812 5002 kubelet.go:2542] 
"SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-mh7cn" Sep 30 13:06:27 crc kubenswrapper[5002]: I0930 13:06:27.278196 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-mh7cn" Sep 30 13:06:27 crc kubenswrapper[5002]: I0930 13:06:27.435610 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-mh7cn"] Sep 30 13:06:28 crc kubenswrapper[5002]: I0930 13:06:28.573175 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-rqfbc" Sep 30 13:06:29 crc kubenswrapper[5002]: I0930 13:06:29.046389 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-mh7cn" podUID="b5658aba-c030-416d-86c6-1708b6854862" containerName="registry-server" containerID="cri-o://c67bc6e4f03f0b96438b9b318af94ebe9c720a9a86fd743cfe85f99f8da6305f" gracePeriod=2 Sep 30 13:06:29 crc kubenswrapper[5002]: I0930 13:06:29.512958 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-mh7cn" Sep 30 13:06:29 crc kubenswrapper[5002]: I0930 13:06:29.609422 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b5658aba-c030-416d-86c6-1708b6854862-catalog-content\") pod \"b5658aba-c030-416d-86c6-1708b6854862\" (UID: \"b5658aba-c030-416d-86c6-1708b6854862\") " Sep 30 13:06:29 crc kubenswrapper[5002]: I0930 13:06:29.610657 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b5658aba-c030-416d-86c6-1708b6854862-utilities\") pod \"b5658aba-c030-416d-86c6-1708b6854862\" (UID: \"b5658aba-c030-416d-86c6-1708b6854862\") " Sep 30 13:06:29 crc kubenswrapper[5002]: I0930 13:06:29.610839 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-b9gp4\" (UniqueName: \"kubernetes.io/projected/b5658aba-c030-416d-86c6-1708b6854862-kube-api-access-b9gp4\") pod \"b5658aba-c030-416d-86c6-1708b6854862\" (UID: \"b5658aba-c030-416d-86c6-1708b6854862\") " Sep 30 13:06:29 crc kubenswrapper[5002]: I0930 13:06:29.611563 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b5658aba-c030-416d-86c6-1708b6854862-utilities" (OuterVolumeSpecName: "utilities") pod "b5658aba-c030-416d-86c6-1708b6854862" (UID: "b5658aba-c030-416d-86c6-1708b6854862"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 13:06:29 crc kubenswrapper[5002]: I0930 13:06:29.618060 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b5658aba-c030-416d-86c6-1708b6854862-kube-api-access-b9gp4" (OuterVolumeSpecName: "kube-api-access-b9gp4") pod "b5658aba-c030-416d-86c6-1708b6854862" (UID: "b5658aba-c030-416d-86c6-1708b6854862"). InnerVolumeSpecName "kube-api-access-b9gp4". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 13:06:29 crc kubenswrapper[5002]: I0930 13:06:29.669284 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-rqfbc"] Sep 30 13:06:29 crc kubenswrapper[5002]: I0930 13:06:29.684402 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b5658aba-c030-416d-86c6-1708b6854862-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b5658aba-c030-416d-86c6-1708b6854862" (UID: "b5658aba-c030-416d-86c6-1708b6854862"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 13:06:29 crc kubenswrapper[5002]: I0930 13:06:29.717155 5002 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b5658aba-c030-416d-86c6-1708b6854862-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 13:06:29 crc kubenswrapper[5002]: I0930 13:06:29.717196 5002 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b5658aba-c030-416d-86c6-1708b6854862-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 13:06:29 crc kubenswrapper[5002]: I0930 13:06:29.717222 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-b9gp4\" (UniqueName: \"kubernetes.io/projected/b5658aba-c030-416d-86c6-1708b6854862-kube-api-access-b9gp4\") on node \"crc\" DevicePath \"\"" Sep 30 13:06:30 crc kubenswrapper[5002]: I0930 13:06:30.044499 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-ssrzt"] Sep 30 13:06:30 crc kubenswrapper[5002]: I0930 13:06:30.045181 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-ssrzt" podUID="8db93be7-82da-4e11-bfd0-d5e7d804177c" containerName="registry-server" containerID="cri-o://5817a8fdc38fe6e7086c8b844b364fbeec00e33c972bb3dc60a9a454afbc61ec" gracePeriod=2 Sep 30 13:06:30 crc kubenswrapper[5002]: I0930 13:06:30.082266 5002 generic.go:334] "Generic (PLEG): container finished" podID="b5658aba-c030-416d-86c6-1708b6854862" containerID="c67bc6e4f03f0b96438b9b318af94ebe9c720a9a86fd743cfe85f99f8da6305f" exitCode=0 Sep 30 13:06:30 crc kubenswrapper[5002]: I0930 13:06:30.082330 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mh7cn" event={"ID":"b5658aba-c030-416d-86c6-1708b6854862","Type":"ContainerDied","Data":"c67bc6e4f03f0b96438b9b318af94ebe9c720a9a86fd743cfe85f99f8da6305f"} Sep 30 13:06:30 crc kubenswrapper[5002]: I0930 13:06:30.082368 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mh7cn" event={"ID":"b5658aba-c030-416d-86c6-1708b6854862","Type":"ContainerDied","Data":"5c7ccb561537a30f25bddd18741b62e20aaff806fde5dc56cbde3406e90e47a3"} Sep 30 13:06:30 crc kubenswrapper[5002]: I0930 13:06:30.082395 5002 scope.go:117] "RemoveContainer" containerID="c67bc6e4f03f0b96438b9b318af94ebe9c720a9a86fd743cfe85f99f8da6305f" Sep 30 13:06:30 crc kubenswrapper[5002]: I0930 13:06:30.082637 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-mh7cn" Sep 30 13:06:30 crc kubenswrapper[5002]: I0930 13:06:30.110715 5002 scope.go:117] "RemoveContainer" containerID="73b2028c2527e243c80157a58edca433ddfb09646851e23404feeb7fce249c6b" Sep 30 13:06:30 crc kubenswrapper[5002]: I0930 13:06:30.229362 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-mh7cn"] Sep 30 13:06:30 crc kubenswrapper[5002]: I0930 13:06:30.236228 5002 scope.go:117] "RemoveContainer" containerID="0c6d9f69b02980151fbfd4349d1a3c8ba41dd70971c34be8e2e90eee4385ef30" Sep 30 13:06:30 crc kubenswrapper[5002]: I0930 13:06:30.239066 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-mh7cn"] Sep 30 13:06:30 crc kubenswrapper[5002]: I0930 13:06:30.269784 5002 scope.go:117] "RemoveContainer" containerID="c67bc6e4f03f0b96438b9b318af94ebe9c720a9a86fd743cfe85f99f8da6305f" Sep 30 13:06:30 crc kubenswrapper[5002]: E0930 13:06:30.270382 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c67bc6e4f03f0b96438b9b318af94ebe9c720a9a86fd743cfe85f99f8da6305f\": container with ID starting with c67bc6e4f03f0b96438b9b318af94ebe9c720a9a86fd743cfe85f99f8da6305f not found: ID does not exist" containerID="c67bc6e4f03f0b96438b9b318af94ebe9c720a9a86fd743cfe85f99f8da6305f" Sep 30 13:06:30 crc kubenswrapper[5002]: I0930 13:06:30.270432 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c67bc6e4f03f0b96438b9b318af94ebe9c720a9a86fd743cfe85f99f8da6305f"} err="failed to get container status \"c67bc6e4f03f0b96438b9b318af94ebe9c720a9a86fd743cfe85f99f8da6305f\": rpc error: code = NotFound desc = could not find container \"c67bc6e4f03f0b96438b9b318af94ebe9c720a9a86fd743cfe85f99f8da6305f\": container with ID starting with c67bc6e4f03f0b96438b9b318af94ebe9c720a9a86fd743cfe85f99f8da6305f not found: ID does not exist" Sep 30 13:06:30 crc kubenswrapper[5002]: I0930 13:06:30.270556 5002 scope.go:117] "RemoveContainer" containerID="73b2028c2527e243c80157a58edca433ddfb09646851e23404feeb7fce249c6b" Sep 30 13:06:30 crc kubenswrapper[5002]: E0930 13:06:30.271533 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"73b2028c2527e243c80157a58edca433ddfb09646851e23404feeb7fce249c6b\": container with ID starting with 73b2028c2527e243c80157a58edca433ddfb09646851e23404feeb7fce249c6b not found: ID does not exist" containerID="73b2028c2527e243c80157a58edca433ddfb09646851e23404feeb7fce249c6b" Sep 30 13:06:30 crc kubenswrapper[5002]: I0930 13:06:30.271560 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"73b2028c2527e243c80157a58edca433ddfb09646851e23404feeb7fce249c6b"} err="failed to get container status \"73b2028c2527e243c80157a58edca433ddfb09646851e23404feeb7fce249c6b\": rpc error: code = NotFound desc = could not find container \"73b2028c2527e243c80157a58edca433ddfb09646851e23404feeb7fce249c6b\": container with ID starting with 73b2028c2527e243c80157a58edca433ddfb09646851e23404feeb7fce249c6b not found: ID does not exist" Sep 30 13:06:30 crc kubenswrapper[5002]: I0930 13:06:30.271578 5002 scope.go:117] "RemoveContainer" containerID="0c6d9f69b02980151fbfd4349d1a3c8ba41dd70971c34be8e2e90eee4385ef30" Sep 30 13:06:30 crc kubenswrapper[5002]: E0930 13:06:30.271795 5002 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"0c6d9f69b02980151fbfd4349d1a3c8ba41dd70971c34be8e2e90eee4385ef30\": container with ID starting with 0c6d9f69b02980151fbfd4349d1a3c8ba41dd70971c34be8e2e90eee4385ef30 not found: ID does not exist" containerID="0c6d9f69b02980151fbfd4349d1a3c8ba41dd70971c34be8e2e90eee4385ef30" Sep 30 13:06:30 crc kubenswrapper[5002]: I0930 13:06:30.271811 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0c6d9f69b02980151fbfd4349d1a3c8ba41dd70971c34be8e2e90eee4385ef30"} err="failed to get container status \"0c6d9f69b02980151fbfd4349d1a3c8ba41dd70971c34be8e2e90eee4385ef30\": rpc error: code = NotFound desc = could not find container \"0c6d9f69b02980151fbfd4349d1a3c8ba41dd70971c34be8e2e90eee4385ef30\": container with ID starting with 0c6d9f69b02980151fbfd4349d1a3c8ba41dd70971c34be8e2e90eee4385ef30 not found: ID does not exist" Sep 30 13:06:30 crc kubenswrapper[5002]: I0930 13:06:30.621856 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-ssrzt" Sep 30 13:06:30 crc kubenswrapper[5002]: I0930 13:06:30.637188 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8db93be7-82da-4e11-bfd0-d5e7d804177c-catalog-content\") pod \"8db93be7-82da-4e11-bfd0-d5e7d804177c\" (UID: \"8db93be7-82da-4e11-bfd0-d5e7d804177c\") " Sep 30 13:06:30 crc kubenswrapper[5002]: I0930 13:06:30.637305 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8db93be7-82da-4e11-bfd0-d5e7d804177c-utilities\") pod \"8db93be7-82da-4e11-bfd0-d5e7d804177c\" (UID: \"8db93be7-82da-4e11-bfd0-d5e7d804177c\") " Sep 30 13:06:30 crc kubenswrapper[5002]: I0930 13:06:30.637433 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6nv9c\" (UniqueName: \"kubernetes.io/projected/8db93be7-82da-4e11-bfd0-d5e7d804177c-kube-api-access-6nv9c\") pod \"8db93be7-82da-4e11-bfd0-d5e7d804177c\" (UID: \"8db93be7-82da-4e11-bfd0-d5e7d804177c\") " Sep 30 13:06:30 crc kubenswrapper[5002]: I0930 13:06:30.640688 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8db93be7-82da-4e11-bfd0-d5e7d804177c-utilities" (OuterVolumeSpecName: "utilities") pod "8db93be7-82da-4e11-bfd0-d5e7d804177c" (UID: "8db93be7-82da-4e11-bfd0-d5e7d804177c"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 13:06:30 crc kubenswrapper[5002]: I0930 13:06:30.644559 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8db93be7-82da-4e11-bfd0-d5e7d804177c-kube-api-access-6nv9c" (OuterVolumeSpecName: "kube-api-access-6nv9c") pod "8db93be7-82da-4e11-bfd0-d5e7d804177c" (UID: "8db93be7-82da-4e11-bfd0-d5e7d804177c"). InnerVolumeSpecName "kube-api-access-6nv9c". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 13:06:30 crc kubenswrapper[5002]: I0930 13:06:30.687214 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b5658aba-c030-416d-86c6-1708b6854862" path="/var/lib/kubelet/pods/b5658aba-c030-416d-86c6-1708b6854862/volumes" Sep 30 13:06:30 crc kubenswrapper[5002]: I0930 13:06:30.711389 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8db93be7-82da-4e11-bfd0-d5e7d804177c-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "8db93be7-82da-4e11-bfd0-d5e7d804177c" (UID: "8db93be7-82da-4e11-bfd0-d5e7d804177c"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 13:06:30 crc kubenswrapper[5002]: I0930 13:06:30.740152 5002 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8db93be7-82da-4e11-bfd0-d5e7d804177c-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 13:06:30 crc kubenswrapper[5002]: I0930 13:06:30.740192 5002 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8db93be7-82da-4e11-bfd0-d5e7d804177c-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 13:06:30 crc kubenswrapper[5002]: I0930 13:06:30.740205 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6nv9c\" (UniqueName: \"kubernetes.io/projected/8db93be7-82da-4e11-bfd0-d5e7d804177c-kube-api-access-6nv9c\") on node \"crc\" DevicePath \"\"" Sep 30 13:06:31 crc kubenswrapper[5002]: I0930 13:06:31.095422 5002 generic.go:334] "Generic (PLEG): container finished" podID="8db93be7-82da-4e11-bfd0-d5e7d804177c" containerID="5817a8fdc38fe6e7086c8b844b364fbeec00e33c972bb3dc60a9a454afbc61ec" exitCode=0 Sep 30 13:06:31 crc kubenswrapper[5002]: I0930 13:06:31.095533 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-ssrzt" Sep 30 13:06:31 crc kubenswrapper[5002]: I0930 13:06:31.095493 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ssrzt" event={"ID":"8db93be7-82da-4e11-bfd0-d5e7d804177c","Type":"ContainerDied","Data":"5817a8fdc38fe6e7086c8b844b364fbeec00e33c972bb3dc60a9a454afbc61ec"} Sep 30 13:06:31 crc kubenswrapper[5002]: I0930 13:06:31.095613 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ssrzt" event={"ID":"8db93be7-82da-4e11-bfd0-d5e7d804177c","Type":"ContainerDied","Data":"5d84044748db94ad077e935d6cf4b6263fd129096ced8f88b958b1229ef2f172"} Sep 30 13:06:31 crc kubenswrapper[5002]: I0930 13:06:31.095649 5002 scope.go:117] "RemoveContainer" containerID="5817a8fdc38fe6e7086c8b844b364fbeec00e33c972bb3dc60a9a454afbc61ec" Sep 30 13:06:31 crc kubenswrapper[5002]: I0930 13:06:31.127307 5002 scope.go:117] "RemoveContainer" containerID="3aefff65f606af8d90946231508d9f7fe66d6ef8a730a94b21c6daea216e8913" Sep 30 13:06:31 crc kubenswrapper[5002]: I0930 13:06:31.128444 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-ssrzt"] Sep 30 13:06:31 crc kubenswrapper[5002]: I0930 13:06:31.137125 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-ssrzt"] Sep 30 13:06:31 crc kubenswrapper[5002]: I0930 13:06:31.158139 5002 scope.go:117] "RemoveContainer" containerID="42515d05952c343c9e7a2eb093f0e805bb018518648b57cbfab706553fce5dbd" Sep 30 13:06:31 crc kubenswrapper[5002]: I0930 13:06:31.237192 5002 scope.go:117] "RemoveContainer" containerID="5817a8fdc38fe6e7086c8b844b364fbeec00e33c972bb3dc60a9a454afbc61ec" Sep 30 13:06:31 crc kubenswrapper[5002]: E0930 13:06:31.237671 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5817a8fdc38fe6e7086c8b844b364fbeec00e33c972bb3dc60a9a454afbc61ec\": container with ID starting with 5817a8fdc38fe6e7086c8b844b364fbeec00e33c972bb3dc60a9a454afbc61ec not found: ID does not exist" containerID="5817a8fdc38fe6e7086c8b844b364fbeec00e33c972bb3dc60a9a454afbc61ec" Sep 30 13:06:31 crc kubenswrapper[5002]: I0930 13:06:31.237709 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5817a8fdc38fe6e7086c8b844b364fbeec00e33c972bb3dc60a9a454afbc61ec"} err="failed to get container status \"5817a8fdc38fe6e7086c8b844b364fbeec00e33c972bb3dc60a9a454afbc61ec\": rpc error: code = NotFound desc = could not find container \"5817a8fdc38fe6e7086c8b844b364fbeec00e33c972bb3dc60a9a454afbc61ec\": container with ID starting with 5817a8fdc38fe6e7086c8b844b364fbeec00e33c972bb3dc60a9a454afbc61ec not found: ID does not exist" Sep 30 13:06:31 crc kubenswrapper[5002]: I0930 13:06:31.237753 5002 scope.go:117] "RemoveContainer" containerID="3aefff65f606af8d90946231508d9f7fe66d6ef8a730a94b21c6daea216e8913" Sep 30 13:06:31 crc kubenswrapper[5002]: E0930 13:06:31.238013 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3aefff65f606af8d90946231508d9f7fe66d6ef8a730a94b21c6daea216e8913\": container with ID starting with 3aefff65f606af8d90946231508d9f7fe66d6ef8a730a94b21c6daea216e8913 not found: ID does not exist" containerID="3aefff65f606af8d90946231508d9f7fe66d6ef8a730a94b21c6daea216e8913" Sep 30 13:06:31 crc kubenswrapper[5002]: I0930 13:06:31.238096 5002 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3aefff65f606af8d90946231508d9f7fe66d6ef8a730a94b21c6daea216e8913"} err="failed to get container status \"3aefff65f606af8d90946231508d9f7fe66d6ef8a730a94b21c6daea216e8913\": rpc error: code = NotFound desc = could not find container \"3aefff65f606af8d90946231508d9f7fe66d6ef8a730a94b21c6daea216e8913\": container with ID starting with 3aefff65f606af8d90946231508d9f7fe66d6ef8a730a94b21c6daea216e8913 not found: ID does not exist" Sep 30 13:06:31 crc kubenswrapper[5002]: I0930 13:06:31.238130 5002 scope.go:117] "RemoveContainer" containerID="42515d05952c343c9e7a2eb093f0e805bb018518648b57cbfab706553fce5dbd" Sep 30 13:06:31 crc kubenswrapper[5002]: E0930 13:06:31.238456 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"42515d05952c343c9e7a2eb093f0e805bb018518648b57cbfab706553fce5dbd\": container with ID starting with 42515d05952c343c9e7a2eb093f0e805bb018518648b57cbfab706553fce5dbd not found: ID does not exist" containerID="42515d05952c343c9e7a2eb093f0e805bb018518648b57cbfab706553fce5dbd" Sep 30 13:06:31 crc kubenswrapper[5002]: I0930 13:06:31.238529 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"42515d05952c343c9e7a2eb093f0e805bb018518648b57cbfab706553fce5dbd"} err="failed to get container status \"42515d05952c343c9e7a2eb093f0e805bb018518648b57cbfab706553fce5dbd\": rpc error: code = NotFound desc = could not find container \"42515d05952c343c9e7a2eb093f0e805bb018518648b57cbfab706553fce5dbd\": container with ID starting with 42515d05952c343c9e7a2eb093f0e805bb018518648b57cbfab706553fce5dbd not found: ID does not exist" Sep 30 13:06:31 crc kubenswrapper[5002]: I0930 13:06:31.992665 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-fbswt" Sep 30 13:06:32 crc kubenswrapper[5002]: I0930 13:06:32.685617 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8db93be7-82da-4e11-bfd0-d5e7d804177c" path="/var/lib/kubelet/pods/8db93be7-82da-4e11-bfd0-d5e7d804177c/volumes" Sep 30 13:06:34 crc kubenswrapper[5002]: I0930 13:06:34.436750 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-fbswt"] Sep 30 13:06:34 crc kubenswrapper[5002]: I0930 13:06:34.437425 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-fbswt" podUID="c5eedd99-919b-4ab1-96e4-468fffd6bd2f" containerName="registry-server" containerID="cri-o://1f5ca53c252d0d3edfb010231a83ae32f1823358b68da57664fc220b3a0ca1e4" gracePeriod=2 Sep 30 13:06:34 crc kubenswrapper[5002]: I0930 13:06:34.946113 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-fbswt" Sep 30 13:06:35 crc kubenswrapper[5002]: I0930 13:06:35.015912 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c5eedd99-919b-4ab1-96e4-468fffd6bd2f-catalog-content\") pod \"c5eedd99-919b-4ab1-96e4-468fffd6bd2f\" (UID: \"c5eedd99-919b-4ab1-96e4-468fffd6bd2f\") " Sep 30 13:06:35 crc kubenswrapper[5002]: I0930 13:06:35.016035 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qt2md\" (UniqueName: \"kubernetes.io/projected/c5eedd99-919b-4ab1-96e4-468fffd6bd2f-kube-api-access-qt2md\") pod \"c5eedd99-919b-4ab1-96e4-468fffd6bd2f\" (UID: \"c5eedd99-919b-4ab1-96e4-468fffd6bd2f\") " Sep 30 13:06:35 crc kubenswrapper[5002]: I0930 13:06:35.016092 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c5eedd99-919b-4ab1-96e4-468fffd6bd2f-utilities\") pod \"c5eedd99-919b-4ab1-96e4-468fffd6bd2f\" (UID: \"c5eedd99-919b-4ab1-96e4-468fffd6bd2f\") " Sep 30 13:06:35 crc kubenswrapper[5002]: I0930 13:06:35.017047 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c5eedd99-919b-4ab1-96e4-468fffd6bd2f-utilities" (OuterVolumeSpecName: "utilities") pod "c5eedd99-919b-4ab1-96e4-468fffd6bd2f" (UID: "c5eedd99-919b-4ab1-96e4-468fffd6bd2f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 13:06:35 crc kubenswrapper[5002]: I0930 13:06:35.027930 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c5eedd99-919b-4ab1-96e4-468fffd6bd2f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "c5eedd99-919b-4ab1-96e4-468fffd6bd2f" (UID: "c5eedd99-919b-4ab1-96e4-468fffd6bd2f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 13:06:35 crc kubenswrapper[5002]: I0930 13:06:35.036207 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c5eedd99-919b-4ab1-96e4-468fffd6bd2f-kube-api-access-qt2md" (OuterVolumeSpecName: "kube-api-access-qt2md") pod "c5eedd99-919b-4ab1-96e4-468fffd6bd2f" (UID: "c5eedd99-919b-4ab1-96e4-468fffd6bd2f"). InnerVolumeSpecName "kube-api-access-qt2md". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 13:06:35 crc kubenswrapper[5002]: I0930 13:06:35.118717 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qt2md\" (UniqueName: \"kubernetes.io/projected/c5eedd99-919b-4ab1-96e4-468fffd6bd2f-kube-api-access-qt2md\") on node \"crc\" DevicePath \"\"" Sep 30 13:06:35 crc kubenswrapper[5002]: I0930 13:06:35.118752 5002 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c5eedd99-919b-4ab1-96e4-468fffd6bd2f-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 13:06:35 crc kubenswrapper[5002]: I0930 13:06:35.118762 5002 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c5eedd99-919b-4ab1-96e4-468fffd6bd2f-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 13:06:35 crc kubenswrapper[5002]: I0930 13:06:35.134131 5002 generic.go:334] "Generic (PLEG): container finished" podID="c5eedd99-919b-4ab1-96e4-468fffd6bd2f" containerID="1f5ca53c252d0d3edfb010231a83ae32f1823358b68da57664fc220b3a0ca1e4" exitCode=0 Sep 30 13:06:35 crc kubenswrapper[5002]: I0930 13:06:35.134199 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-fbswt" event={"ID":"c5eedd99-919b-4ab1-96e4-468fffd6bd2f","Type":"ContainerDied","Data":"1f5ca53c252d0d3edfb010231a83ae32f1823358b68da57664fc220b3a0ca1e4"} Sep 30 13:06:35 crc kubenswrapper[5002]: I0930 13:06:35.134248 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-fbswt" event={"ID":"c5eedd99-919b-4ab1-96e4-468fffd6bd2f","Type":"ContainerDied","Data":"b3257877138a9fd1ddef7c6b8195cf038a5e3cee3c64647a70a6e2d1aaaca266"} Sep 30 13:06:35 crc kubenswrapper[5002]: I0930 13:06:35.134279 5002 scope.go:117] "RemoveContainer" containerID="1f5ca53c252d0d3edfb010231a83ae32f1823358b68da57664fc220b3a0ca1e4" Sep 30 13:06:35 crc kubenswrapper[5002]: I0930 13:06:35.134213 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-fbswt" Sep 30 13:06:35 crc kubenswrapper[5002]: I0930 13:06:35.155422 5002 scope.go:117] "RemoveContainer" containerID="4d09c0df386a3cc72794054e6d494d0f32c089952fa1e5a2a7f9d9dca876878c" Sep 30 13:06:35 crc kubenswrapper[5002]: I0930 13:06:35.177377 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-fbswt"] Sep 30 13:06:35 crc kubenswrapper[5002]: I0930 13:06:35.184805 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-fbswt"] Sep 30 13:06:35 crc kubenswrapper[5002]: I0930 13:06:35.186097 5002 scope.go:117] "RemoveContainer" containerID="b0f61acd5ab12e7b0ee24e3099b3e54460685043ab833ec5d7f45467b7e85c84" Sep 30 13:06:35 crc kubenswrapper[5002]: I0930 13:06:35.229362 5002 scope.go:117] "RemoveContainer" containerID="1f5ca53c252d0d3edfb010231a83ae32f1823358b68da57664fc220b3a0ca1e4" Sep 30 13:06:35 crc kubenswrapper[5002]: E0930 13:06:35.230160 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1f5ca53c252d0d3edfb010231a83ae32f1823358b68da57664fc220b3a0ca1e4\": container with ID starting with 1f5ca53c252d0d3edfb010231a83ae32f1823358b68da57664fc220b3a0ca1e4 not found: ID does not exist" containerID="1f5ca53c252d0d3edfb010231a83ae32f1823358b68da57664fc220b3a0ca1e4" Sep 30 13:06:35 crc kubenswrapper[5002]: I0930 13:06:35.230200 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1f5ca53c252d0d3edfb010231a83ae32f1823358b68da57664fc220b3a0ca1e4"} err="failed to get container status \"1f5ca53c252d0d3edfb010231a83ae32f1823358b68da57664fc220b3a0ca1e4\": rpc error: code = NotFound desc = could not find container \"1f5ca53c252d0d3edfb010231a83ae32f1823358b68da57664fc220b3a0ca1e4\": container with ID starting with 1f5ca53c252d0d3edfb010231a83ae32f1823358b68da57664fc220b3a0ca1e4 not found: ID does not exist" Sep 30 13:06:35 crc kubenswrapper[5002]: I0930 13:06:35.230221 5002 scope.go:117] "RemoveContainer" containerID="4d09c0df386a3cc72794054e6d494d0f32c089952fa1e5a2a7f9d9dca876878c" Sep 30 13:06:35 crc kubenswrapper[5002]: E0930 13:06:35.230586 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4d09c0df386a3cc72794054e6d494d0f32c089952fa1e5a2a7f9d9dca876878c\": container with ID starting with 4d09c0df386a3cc72794054e6d494d0f32c089952fa1e5a2a7f9d9dca876878c not found: ID does not exist" containerID="4d09c0df386a3cc72794054e6d494d0f32c089952fa1e5a2a7f9d9dca876878c" Sep 30 13:06:35 crc kubenswrapper[5002]: I0930 13:06:35.230618 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4d09c0df386a3cc72794054e6d494d0f32c089952fa1e5a2a7f9d9dca876878c"} err="failed to get container status \"4d09c0df386a3cc72794054e6d494d0f32c089952fa1e5a2a7f9d9dca876878c\": rpc error: code = NotFound desc = could not find container \"4d09c0df386a3cc72794054e6d494d0f32c089952fa1e5a2a7f9d9dca876878c\": container with ID starting with 4d09c0df386a3cc72794054e6d494d0f32c089952fa1e5a2a7f9d9dca876878c not found: ID does not exist" Sep 30 13:06:35 crc kubenswrapper[5002]: I0930 13:06:35.230636 5002 scope.go:117] "RemoveContainer" containerID="b0f61acd5ab12e7b0ee24e3099b3e54460685043ab833ec5d7f45467b7e85c84" Sep 30 13:06:35 crc kubenswrapper[5002]: E0930 13:06:35.230975 5002 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"b0f61acd5ab12e7b0ee24e3099b3e54460685043ab833ec5d7f45467b7e85c84\": container with ID starting with b0f61acd5ab12e7b0ee24e3099b3e54460685043ab833ec5d7f45467b7e85c84 not found: ID does not exist" containerID="b0f61acd5ab12e7b0ee24e3099b3e54460685043ab833ec5d7f45467b7e85c84" Sep 30 13:06:35 crc kubenswrapper[5002]: I0930 13:06:35.231004 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b0f61acd5ab12e7b0ee24e3099b3e54460685043ab833ec5d7f45467b7e85c84"} err="failed to get container status \"b0f61acd5ab12e7b0ee24e3099b3e54460685043ab833ec5d7f45467b7e85c84\": rpc error: code = NotFound desc = could not find container \"b0f61acd5ab12e7b0ee24e3099b3e54460685043ab833ec5d7f45467b7e85c84\": container with ID starting with b0f61acd5ab12e7b0ee24e3099b3e54460685043ab833ec5d7f45467b7e85c84 not found: ID does not exist" Sep 30 13:06:36 crc kubenswrapper[5002]: I0930 13:06:36.693830 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c5eedd99-919b-4ab1-96e4-468fffd6bd2f" path="/var/lib/kubelet/pods/c5eedd99-919b-4ab1-96e4-468fffd6bd2f/volumes" Sep 30 13:07:02 crc kubenswrapper[5002]: I0930 13:07:02.097827 5002 patch_prober.go:28] interesting pod/machine-config-daemon-ncbb5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 13:07:02 crc kubenswrapper[5002]: I0930 13:07:02.098293 5002 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" podUID="341a55c6-78d3-4fa2-8f47-b56fd41fa1c1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 13:07:32 crc kubenswrapper[5002]: I0930 13:07:32.101103 5002 patch_prober.go:28] interesting pod/machine-config-daemon-ncbb5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 13:07:32 crc kubenswrapper[5002]: I0930 13:07:32.101914 5002 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" podUID="341a55c6-78d3-4fa2-8f47-b56fd41fa1c1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 13:08:02 crc kubenswrapper[5002]: I0930 13:08:02.098450 5002 patch_prober.go:28] interesting pod/machine-config-daemon-ncbb5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 13:08:02 crc kubenswrapper[5002]: I0930 13:08:02.099037 5002 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" podUID="341a55c6-78d3-4fa2-8f47-b56fd41fa1c1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 13:08:02 crc kubenswrapper[5002]: I0930 13:08:02.099093 5002 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" 
status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" Sep 30 13:08:02 crc kubenswrapper[5002]: I0930 13:08:02.100028 5002 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"1220b4b7f62978ac482c2e64e77e405623ac305ba80fca6277deaf50597c3a98"} pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 30 13:08:02 crc kubenswrapper[5002]: I0930 13:08:02.100110 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" podUID="341a55c6-78d3-4fa2-8f47-b56fd41fa1c1" containerName="machine-config-daemon" containerID="cri-o://1220b4b7f62978ac482c2e64e77e405623ac305ba80fca6277deaf50597c3a98" gracePeriod=600 Sep 30 13:08:02 crc kubenswrapper[5002]: E0930 13:08:02.231446 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-ncbb5_openshift-machine-config-operator(341a55c6-78d3-4fa2-8f47-b56fd41fa1c1)\"" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" podUID="341a55c6-78d3-4fa2-8f47-b56fd41fa1c1" Sep 30 13:08:03 crc kubenswrapper[5002]: I0930 13:08:03.019202 5002 generic.go:334] "Generic (PLEG): container finished" podID="341a55c6-78d3-4fa2-8f47-b56fd41fa1c1" containerID="1220b4b7f62978ac482c2e64e77e405623ac305ba80fca6277deaf50597c3a98" exitCode=0 Sep 30 13:08:03 crc kubenswrapper[5002]: I0930 13:08:03.019288 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" event={"ID":"341a55c6-78d3-4fa2-8f47-b56fd41fa1c1","Type":"ContainerDied","Data":"1220b4b7f62978ac482c2e64e77e405623ac305ba80fca6277deaf50597c3a98"} Sep 30 13:08:03 crc kubenswrapper[5002]: I0930 13:08:03.019613 5002 scope.go:117] "RemoveContainer" containerID="2171ea1d3e69548aa803f0251f328d7b726c5d0d03df99465807718d28b0c056" Sep 30 13:08:03 crc kubenswrapper[5002]: I0930 13:08:03.020643 5002 scope.go:117] "RemoveContainer" containerID="1220b4b7f62978ac482c2e64e77e405623ac305ba80fca6277deaf50597c3a98" Sep 30 13:08:03 crc kubenswrapper[5002]: E0930 13:08:03.021150 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-ncbb5_openshift-machine-config-operator(341a55c6-78d3-4fa2-8f47-b56fd41fa1c1)\"" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" podUID="341a55c6-78d3-4fa2-8f47-b56fd41fa1c1" Sep 30 13:08:15 crc kubenswrapper[5002]: I0930 13:08:15.676112 5002 scope.go:117] "RemoveContainer" containerID="1220b4b7f62978ac482c2e64e77e405623ac305ba80fca6277deaf50597c3a98" Sep 30 13:08:15 crc kubenswrapper[5002]: E0930 13:08:15.676863 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-ncbb5_openshift-machine-config-operator(341a55c6-78d3-4fa2-8f47-b56fd41fa1c1)\"" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" podUID="341a55c6-78d3-4fa2-8f47-b56fd41fa1c1" Sep 30 13:08:28 crc 
kubenswrapper[5002]: I0930 13:08:28.676172 5002 scope.go:117] "RemoveContainer" containerID="1220b4b7f62978ac482c2e64e77e405623ac305ba80fca6277deaf50597c3a98" Sep 30 13:08:28 crc kubenswrapper[5002]: E0930 13:08:28.676901 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-ncbb5_openshift-machine-config-operator(341a55c6-78d3-4fa2-8f47-b56fd41fa1c1)\"" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" podUID="341a55c6-78d3-4fa2-8f47-b56fd41fa1c1" Sep 30 13:08:40 crc kubenswrapper[5002]: I0930 13:08:40.676139 5002 scope.go:117] "RemoveContainer" containerID="1220b4b7f62978ac482c2e64e77e405623ac305ba80fca6277deaf50597c3a98" Sep 30 13:08:40 crc kubenswrapper[5002]: E0930 13:08:40.677271 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-ncbb5_openshift-machine-config-operator(341a55c6-78d3-4fa2-8f47-b56fd41fa1c1)\"" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" podUID="341a55c6-78d3-4fa2-8f47-b56fd41fa1c1" Sep 30 13:08:54 crc kubenswrapper[5002]: I0930 13:08:54.676203 5002 scope.go:117] "RemoveContainer" containerID="1220b4b7f62978ac482c2e64e77e405623ac305ba80fca6277deaf50597c3a98" Sep 30 13:08:54 crc kubenswrapper[5002]: E0930 13:08:54.676896 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-ncbb5_openshift-machine-config-operator(341a55c6-78d3-4fa2-8f47-b56fd41fa1c1)\"" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" podUID="341a55c6-78d3-4fa2-8f47-b56fd41fa1c1" Sep 30 13:09:05 crc kubenswrapper[5002]: I0930 13:09:05.676150 5002 scope.go:117] "RemoveContainer" containerID="1220b4b7f62978ac482c2e64e77e405623ac305ba80fca6277deaf50597c3a98" Sep 30 13:09:05 crc kubenswrapper[5002]: E0930 13:09:05.676911 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-ncbb5_openshift-machine-config-operator(341a55c6-78d3-4fa2-8f47-b56fd41fa1c1)\"" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" podUID="341a55c6-78d3-4fa2-8f47-b56fd41fa1c1" Sep 30 13:09:17 crc kubenswrapper[5002]: I0930 13:09:17.676341 5002 scope.go:117] "RemoveContainer" containerID="1220b4b7f62978ac482c2e64e77e405623ac305ba80fca6277deaf50597c3a98" Sep 30 13:09:17 crc kubenswrapper[5002]: E0930 13:09:17.677435 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-ncbb5_openshift-machine-config-operator(341a55c6-78d3-4fa2-8f47-b56fd41fa1c1)\"" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" podUID="341a55c6-78d3-4fa2-8f47-b56fd41fa1c1" Sep 30 13:09:32 crc kubenswrapper[5002]: I0930 13:09:32.676756 5002 scope.go:117] "RemoveContainer" containerID="1220b4b7f62978ac482c2e64e77e405623ac305ba80fca6277deaf50597c3a98" Sep 30 13:09:32 crc 
kubenswrapper[5002]: E0930 13:09:32.677643 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-ncbb5_openshift-machine-config-operator(341a55c6-78d3-4fa2-8f47-b56fd41fa1c1)\"" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" podUID="341a55c6-78d3-4fa2-8f47-b56fd41fa1c1" Sep 30 13:09:45 crc kubenswrapper[5002]: I0930 13:09:45.676579 5002 scope.go:117] "RemoveContainer" containerID="1220b4b7f62978ac482c2e64e77e405623ac305ba80fca6277deaf50597c3a98" Sep 30 13:09:45 crc kubenswrapper[5002]: E0930 13:09:45.678328 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-ncbb5_openshift-machine-config-operator(341a55c6-78d3-4fa2-8f47-b56fd41fa1c1)\"" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" podUID="341a55c6-78d3-4fa2-8f47-b56fd41fa1c1" Sep 30 13:09:58 crc kubenswrapper[5002]: I0930 13:09:58.676355 5002 scope.go:117] "RemoveContainer" containerID="1220b4b7f62978ac482c2e64e77e405623ac305ba80fca6277deaf50597c3a98" Sep 30 13:09:58 crc kubenswrapper[5002]: E0930 13:09:58.677448 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-ncbb5_openshift-machine-config-operator(341a55c6-78d3-4fa2-8f47-b56fd41fa1c1)\"" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" podUID="341a55c6-78d3-4fa2-8f47-b56fd41fa1c1" Sep 30 13:10:13 crc kubenswrapper[5002]: I0930 13:10:13.676922 5002 scope.go:117] "RemoveContainer" containerID="1220b4b7f62978ac482c2e64e77e405623ac305ba80fca6277deaf50597c3a98" Sep 30 13:10:13 crc kubenswrapper[5002]: E0930 13:10:13.677794 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-ncbb5_openshift-machine-config-operator(341a55c6-78d3-4fa2-8f47-b56fd41fa1c1)\"" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" podUID="341a55c6-78d3-4fa2-8f47-b56fd41fa1c1" Sep 30 13:10:28 crc kubenswrapper[5002]: I0930 13:10:28.676366 5002 scope.go:117] "RemoveContainer" containerID="1220b4b7f62978ac482c2e64e77e405623ac305ba80fca6277deaf50597c3a98" Sep 30 13:10:28 crc kubenswrapper[5002]: E0930 13:10:28.677048 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-ncbb5_openshift-machine-config-operator(341a55c6-78d3-4fa2-8f47-b56fd41fa1c1)\"" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" podUID="341a55c6-78d3-4fa2-8f47-b56fd41fa1c1" Sep 30 13:10:40 crc kubenswrapper[5002]: I0930 13:10:40.677032 5002 scope.go:117] "RemoveContainer" containerID="1220b4b7f62978ac482c2e64e77e405623ac305ba80fca6277deaf50597c3a98" Sep 30 13:10:40 crc kubenswrapper[5002]: E0930 13:10:40.677909 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: 
\"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-ncbb5_openshift-machine-config-operator(341a55c6-78d3-4fa2-8f47-b56fd41fa1c1)\"" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" podUID="341a55c6-78d3-4fa2-8f47-b56fd41fa1c1" Sep 30 13:10:53 crc kubenswrapper[5002]: I0930 13:10:53.676555 5002 scope.go:117] "RemoveContainer" containerID="1220b4b7f62978ac482c2e64e77e405623ac305ba80fca6277deaf50597c3a98" Sep 30 13:10:53 crc kubenswrapper[5002]: E0930 13:10:53.677355 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-ncbb5_openshift-machine-config-operator(341a55c6-78d3-4fa2-8f47-b56fd41fa1c1)\"" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" podUID="341a55c6-78d3-4fa2-8f47-b56fd41fa1c1" Sep 30 13:11:06 crc kubenswrapper[5002]: I0930 13:11:06.683139 5002 scope.go:117] "RemoveContainer" containerID="1220b4b7f62978ac482c2e64e77e405623ac305ba80fca6277deaf50597c3a98" Sep 30 13:11:06 crc kubenswrapper[5002]: E0930 13:11:06.684077 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-ncbb5_openshift-machine-config-operator(341a55c6-78d3-4fa2-8f47-b56fd41fa1c1)\"" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" podUID="341a55c6-78d3-4fa2-8f47-b56fd41fa1c1" Sep 30 13:11:21 crc kubenswrapper[5002]: I0930 13:11:21.676307 5002 scope.go:117] "RemoveContainer" containerID="1220b4b7f62978ac482c2e64e77e405623ac305ba80fca6277deaf50597c3a98" Sep 30 13:11:21 crc kubenswrapper[5002]: E0930 13:11:21.677129 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-ncbb5_openshift-machine-config-operator(341a55c6-78d3-4fa2-8f47-b56fd41fa1c1)\"" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" podUID="341a55c6-78d3-4fa2-8f47-b56fd41fa1c1" Sep 30 13:11:34 crc kubenswrapper[5002]: I0930 13:11:34.676984 5002 scope.go:117] "RemoveContainer" containerID="1220b4b7f62978ac482c2e64e77e405623ac305ba80fca6277deaf50597c3a98" Sep 30 13:11:34 crc kubenswrapper[5002]: E0930 13:11:34.677949 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-ncbb5_openshift-machine-config-operator(341a55c6-78d3-4fa2-8f47-b56fd41fa1c1)\"" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" podUID="341a55c6-78d3-4fa2-8f47-b56fd41fa1c1" Sep 30 13:11:45 crc kubenswrapper[5002]: I0930 13:11:45.676577 5002 scope.go:117] "RemoveContainer" containerID="1220b4b7f62978ac482c2e64e77e405623ac305ba80fca6277deaf50597c3a98" Sep 30 13:11:45 crc kubenswrapper[5002]: E0930 13:11:45.677295 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-ncbb5_openshift-machine-config-operator(341a55c6-78d3-4fa2-8f47-b56fd41fa1c1)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" podUID="341a55c6-78d3-4fa2-8f47-b56fd41fa1c1" Sep 30 13:11:58 crc kubenswrapper[5002]: I0930 13:11:58.676373 5002 scope.go:117] "RemoveContainer" containerID="1220b4b7f62978ac482c2e64e77e405623ac305ba80fca6277deaf50597c3a98" Sep 30 13:11:58 crc kubenswrapper[5002]: E0930 13:11:58.677220 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-ncbb5_openshift-machine-config-operator(341a55c6-78d3-4fa2-8f47-b56fd41fa1c1)\"" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" podUID="341a55c6-78d3-4fa2-8f47-b56fd41fa1c1" Sep 30 13:12:12 crc kubenswrapper[5002]: I0930 13:12:12.676822 5002 scope.go:117] "RemoveContainer" containerID="1220b4b7f62978ac482c2e64e77e405623ac305ba80fca6277deaf50597c3a98" Sep 30 13:12:12 crc kubenswrapper[5002]: E0930 13:12:12.678094 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-ncbb5_openshift-machine-config-operator(341a55c6-78d3-4fa2-8f47-b56fd41fa1c1)\"" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" podUID="341a55c6-78d3-4fa2-8f47-b56fd41fa1c1" Sep 30 13:12:24 crc kubenswrapper[5002]: I0930 13:12:24.676627 5002 scope.go:117] "RemoveContainer" containerID="1220b4b7f62978ac482c2e64e77e405623ac305ba80fca6277deaf50597c3a98" Sep 30 13:12:24 crc kubenswrapper[5002]: E0930 13:12:24.677458 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-ncbb5_openshift-machine-config-operator(341a55c6-78d3-4fa2-8f47-b56fd41fa1c1)\"" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" podUID="341a55c6-78d3-4fa2-8f47-b56fd41fa1c1" Sep 30 13:12:39 crc kubenswrapper[5002]: I0930 13:12:39.676136 5002 scope.go:117] "RemoveContainer" containerID="1220b4b7f62978ac482c2e64e77e405623ac305ba80fca6277deaf50597c3a98" Sep 30 13:12:39 crc kubenswrapper[5002]: E0930 13:12:39.676841 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-ncbb5_openshift-machine-config-operator(341a55c6-78d3-4fa2-8f47-b56fd41fa1c1)\"" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" podUID="341a55c6-78d3-4fa2-8f47-b56fd41fa1c1" Sep 30 13:12:52 crc kubenswrapper[5002]: I0930 13:12:52.676699 5002 scope.go:117] "RemoveContainer" containerID="1220b4b7f62978ac482c2e64e77e405623ac305ba80fca6277deaf50597c3a98" Sep 30 13:12:52 crc kubenswrapper[5002]: E0930 13:12:52.677589 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-ncbb5_openshift-machine-config-operator(341a55c6-78d3-4fa2-8f47-b56fd41fa1c1)\"" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" podUID="341a55c6-78d3-4fa2-8f47-b56fd41fa1c1" Sep 30 13:13:03 crc kubenswrapper[5002]: I0930 13:13:03.676637 5002 
scope.go:117] "RemoveContainer" containerID="1220b4b7f62978ac482c2e64e77e405623ac305ba80fca6277deaf50597c3a98" Sep 30 13:13:04 crc kubenswrapper[5002]: I0930 13:13:04.857028 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" event={"ID":"341a55c6-78d3-4fa2-8f47-b56fd41fa1c1","Type":"ContainerStarted","Data":"563c0f60a2e42abd33256343ccd18a018715598f4986d0dbab88b973fb61a222"} Sep 30 13:15:00 crc kubenswrapper[5002]: I0930 13:15:00.179300 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29320635-5lczz"] Sep 30 13:15:00 crc kubenswrapper[5002]: E0930 13:15:00.180262 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b5658aba-c030-416d-86c6-1708b6854862" containerName="extract-utilities" Sep 30 13:15:00 crc kubenswrapper[5002]: I0930 13:15:00.180276 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="b5658aba-c030-416d-86c6-1708b6854862" containerName="extract-utilities" Sep 30 13:15:00 crc kubenswrapper[5002]: E0930 13:15:00.180290 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c5eedd99-919b-4ab1-96e4-468fffd6bd2f" containerName="extract-content" Sep 30 13:15:00 crc kubenswrapper[5002]: I0930 13:15:00.180297 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="c5eedd99-919b-4ab1-96e4-468fffd6bd2f" containerName="extract-content" Sep 30 13:15:00 crc kubenswrapper[5002]: E0930 13:15:00.180307 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b5658aba-c030-416d-86c6-1708b6854862" containerName="registry-server" Sep 30 13:15:00 crc kubenswrapper[5002]: I0930 13:15:00.180315 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="b5658aba-c030-416d-86c6-1708b6854862" containerName="registry-server" Sep 30 13:15:00 crc kubenswrapper[5002]: E0930 13:15:00.180328 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8db93be7-82da-4e11-bfd0-d5e7d804177c" containerName="registry-server" Sep 30 13:15:00 crc kubenswrapper[5002]: I0930 13:15:00.180334 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="8db93be7-82da-4e11-bfd0-d5e7d804177c" containerName="registry-server" Sep 30 13:15:00 crc kubenswrapper[5002]: E0930 13:15:00.180344 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8db93be7-82da-4e11-bfd0-d5e7d804177c" containerName="extract-utilities" Sep 30 13:15:00 crc kubenswrapper[5002]: I0930 13:15:00.180351 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="8db93be7-82da-4e11-bfd0-d5e7d804177c" containerName="extract-utilities" Sep 30 13:15:00 crc kubenswrapper[5002]: E0930 13:15:00.180377 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c5eedd99-919b-4ab1-96e4-468fffd6bd2f" containerName="extract-utilities" Sep 30 13:15:00 crc kubenswrapper[5002]: I0930 13:15:00.180384 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="c5eedd99-919b-4ab1-96e4-468fffd6bd2f" containerName="extract-utilities" Sep 30 13:15:00 crc kubenswrapper[5002]: E0930 13:15:00.180394 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c5eedd99-919b-4ab1-96e4-468fffd6bd2f" containerName="registry-server" Sep 30 13:15:00 crc kubenswrapper[5002]: I0930 13:15:00.180400 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="c5eedd99-919b-4ab1-96e4-468fffd6bd2f" containerName="registry-server" Sep 30 13:15:00 crc kubenswrapper[5002]: E0930 13:15:00.180409 5002 cpu_manager.go:410] "RemoveStaleState: removing 
container" podUID="8db93be7-82da-4e11-bfd0-d5e7d804177c" containerName="extract-content" Sep 30 13:15:00 crc kubenswrapper[5002]: I0930 13:15:00.180415 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="8db93be7-82da-4e11-bfd0-d5e7d804177c" containerName="extract-content" Sep 30 13:15:00 crc kubenswrapper[5002]: E0930 13:15:00.180424 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b5658aba-c030-416d-86c6-1708b6854862" containerName="extract-content" Sep 30 13:15:00 crc kubenswrapper[5002]: I0930 13:15:00.180431 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="b5658aba-c030-416d-86c6-1708b6854862" containerName="extract-content" Sep 30 13:15:00 crc kubenswrapper[5002]: I0930 13:15:00.180647 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="c5eedd99-919b-4ab1-96e4-468fffd6bd2f" containerName="registry-server" Sep 30 13:15:00 crc kubenswrapper[5002]: I0930 13:15:00.180669 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="8db93be7-82da-4e11-bfd0-d5e7d804177c" containerName="registry-server" Sep 30 13:15:00 crc kubenswrapper[5002]: I0930 13:15:00.180686 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="b5658aba-c030-416d-86c6-1708b6854862" containerName="registry-server" Sep 30 13:15:00 crc kubenswrapper[5002]: I0930 13:15:00.181426 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29320635-5lczz" Sep 30 13:15:00 crc kubenswrapper[5002]: I0930 13:15:00.184081 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Sep 30 13:15:00 crc kubenswrapper[5002]: I0930 13:15:00.185149 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Sep 30 13:15:00 crc kubenswrapper[5002]: I0930 13:15:00.192743 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29320635-5lczz"] Sep 30 13:15:00 crc kubenswrapper[5002]: I0930 13:15:00.255345 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/f02ff4ba-79b6-46ed-9a2d-f0e71b4e87e8-config-volume\") pod \"collect-profiles-29320635-5lczz\" (UID: \"f02ff4ba-79b6-46ed-9a2d-f0e71b4e87e8\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29320635-5lczz" Sep 30 13:15:00 crc kubenswrapper[5002]: I0930 13:15:00.255411 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jmtfz\" (UniqueName: \"kubernetes.io/projected/f02ff4ba-79b6-46ed-9a2d-f0e71b4e87e8-kube-api-access-jmtfz\") pod \"collect-profiles-29320635-5lczz\" (UID: \"f02ff4ba-79b6-46ed-9a2d-f0e71b4e87e8\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29320635-5lczz" Sep 30 13:15:00 crc kubenswrapper[5002]: I0930 13:15:00.255505 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/f02ff4ba-79b6-46ed-9a2d-f0e71b4e87e8-secret-volume\") pod \"collect-profiles-29320635-5lczz\" (UID: \"f02ff4ba-79b6-46ed-9a2d-f0e71b4e87e8\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29320635-5lczz" Sep 30 13:15:00 crc kubenswrapper[5002]: I0930 13:15:00.357172 5002 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/f02ff4ba-79b6-46ed-9a2d-f0e71b4e87e8-config-volume\") pod \"collect-profiles-29320635-5lczz\" (UID: \"f02ff4ba-79b6-46ed-9a2d-f0e71b4e87e8\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29320635-5lczz" Sep 30 13:15:00 crc kubenswrapper[5002]: I0930 13:15:00.357609 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jmtfz\" (UniqueName: \"kubernetes.io/projected/f02ff4ba-79b6-46ed-9a2d-f0e71b4e87e8-kube-api-access-jmtfz\") pod \"collect-profiles-29320635-5lczz\" (UID: \"f02ff4ba-79b6-46ed-9a2d-f0e71b4e87e8\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29320635-5lczz" Sep 30 13:15:00 crc kubenswrapper[5002]: I0930 13:15:00.357758 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/f02ff4ba-79b6-46ed-9a2d-f0e71b4e87e8-secret-volume\") pod \"collect-profiles-29320635-5lczz\" (UID: \"f02ff4ba-79b6-46ed-9a2d-f0e71b4e87e8\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29320635-5lczz" Sep 30 13:15:00 crc kubenswrapper[5002]: I0930 13:15:00.358252 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/f02ff4ba-79b6-46ed-9a2d-f0e71b4e87e8-config-volume\") pod \"collect-profiles-29320635-5lczz\" (UID: \"f02ff4ba-79b6-46ed-9a2d-f0e71b4e87e8\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29320635-5lczz" Sep 30 13:15:00 crc kubenswrapper[5002]: I0930 13:15:00.364290 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/f02ff4ba-79b6-46ed-9a2d-f0e71b4e87e8-secret-volume\") pod \"collect-profiles-29320635-5lczz\" (UID: \"f02ff4ba-79b6-46ed-9a2d-f0e71b4e87e8\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29320635-5lczz" Sep 30 13:15:00 crc kubenswrapper[5002]: I0930 13:15:00.389339 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jmtfz\" (UniqueName: \"kubernetes.io/projected/f02ff4ba-79b6-46ed-9a2d-f0e71b4e87e8-kube-api-access-jmtfz\") pod \"collect-profiles-29320635-5lczz\" (UID: \"f02ff4ba-79b6-46ed-9a2d-f0e71b4e87e8\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29320635-5lczz" Sep 30 13:15:00 crc kubenswrapper[5002]: I0930 13:15:00.502059 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29320635-5lczz" Sep 30 13:15:00 crc kubenswrapper[5002]: I0930 13:15:00.928122 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29320635-5lczz"] Sep 30 13:15:01 crc kubenswrapper[5002]: I0930 13:15:01.894398 5002 generic.go:334] "Generic (PLEG): container finished" podID="f02ff4ba-79b6-46ed-9a2d-f0e71b4e87e8" containerID="fe655df1e2f40868848cb90324c37669163a5d85a8e06a93b296e51cc4fe6654" exitCode=0 Sep 30 13:15:01 crc kubenswrapper[5002]: I0930 13:15:01.894541 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29320635-5lczz" event={"ID":"f02ff4ba-79b6-46ed-9a2d-f0e71b4e87e8","Type":"ContainerDied","Data":"fe655df1e2f40868848cb90324c37669163a5d85a8e06a93b296e51cc4fe6654"} Sep 30 13:15:01 crc kubenswrapper[5002]: I0930 13:15:01.894772 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29320635-5lczz" event={"ID":"f02ff4ba-79b6-46ed-9a2d-f0e71b4e87e8","Type":"ContainerStarted","Data":"ec4a06852ca5393cf4db8b5dbb6a27177f4fa0190ff6c55a72a033888a74996f"} Sep 30 13:15:02 crc kubenswrapper[5002]: I0930 13:15:02.026790 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-tvsq5"] Sep 30 13:15:02 crc kubenswrapper[5002]: I0930 13:15:02.029324 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-tvsq5" Sep 30 13:15:02 crc kubenswrapper[5002]: I0930 13:15:02.037758 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-tvsq5"] Sep 30 13:15:02 crc kubenswrapper[5002]: I0930 13:15:02.089993 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/795e98c6-49c0-4e5c-8526-85039800ebcc-utilities\") pod \"redhat-operators-tvsq5\" (UID: \"795e98c6-49c0-4e5c-8526-85039800ebcc\") " pod="openshift-marketplace/redhat-operators-tvsq5" Sep 30 13:15:02 crc kubenswrapper[5002]: I0930 13:15:02.090180 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/795e98c6-49c0-4e5c-8526-85039800ebcc-catalog-content\") pod \"redhat-operators-tvsq5\" (UID: \"795e98c6-49c0-4e5c-8526-85039800ebcc\") " pod="openshift-marketplace/redhat-operators-tvsq5" Sep 30 13:15:02 crc kubenswrapper[5002]: I0930 13:15:02.090454 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nqcdh\" (UniqueName: \"kubernetes.io/projected/795e98c6-49c0-4e5c-8526-85039800ebcc-kube-api-access-nqcdh\") pod \"redhat-operators-tvsq5\" (UID: \"795e98c6-49c0-4e5c-8526-85039800ebcc\") " pod="openshift-marketplace/redhat-operators-tvsq5" Sep 30 13:15:02 crc kubenswrapper[5002]: I0930 13:15:02.192661 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nqcdh\" (UniqueName: \"kubernetes.io/projected/795e98c6-49c0-4e5c-8526-85039800ebcc-kube-api-access-nqcdh\") pod \"redhat-operators-tvsq5\" (UID: \"795e98c6-49c0-4e5c-8526-85039800ebcc\") " pod="openshift-marketplace/redhat-operators-tvsq5" Sep 30 13:15:02 crc kubenswrapper[5002]: I0930 13:15:02.193129 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" 
(UniqueName: \"kubernetes.io/empty-dir/795e98c6-49c0-4e5c-8526-85039800ebcc-utilities\") pod \"redhat-operators-tvsq5\" (UID: \"795e98c6-49c0-4e5c-8526-85039800ebcc\") " pod="openshift-marketplace/redhat-operators-tvsq5" Sep 30 13:15:02 crc kubenswrapper[5002]: I0930 13:15:02.193182 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/795e98c6-49c0-4e5c-8526-85039800ebcc-catalog-content\") pod \"redhat-operators-tvsq5\" (UID: \"795e98c6-49c0-4e5c-8526-85039800ebcc\") " pod="openshift-marketplace/redhat-operators-tvsq5" Sep 30 13:15:02 crc kubenswrapper[5002]: I0930 13:15:02.193616 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/795e98c6-49c0-4e5c-8526-85039800ebcc-utilities\") pod \"redhat-operators-tvsq5\" (UID: \"795e98c6-49c0-4e5c-8526-85039800ebcc\") " pod="openshift-marketplace/redhat-operators-tvsq5" Sep 30 13:15:02 crc kubenswrapper[5002]: I0930 13:15:02.193727 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/795e98c6-49c0-4e5c-8526-85039800ebcc-catalog-content\") pod \"redhat-operators-tvsq5\" (UID: \"795e98c6-49c0-4e5c-8526-85039800ebcc\") " pod="openshift-marketplace/redhat-operators-tvsq5" Sep 30 13:15:02 crc kubenswrapper[5002]: I0930 13:15:02.216714 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nqcdh\" (UniqueName: \"kubernetes.io/projected/795e98c6-49c0-4e5c-8526-85039800ebcc-kube-api-access-nqcdh\") pod \"redhat-operators-tvsq5\" (UID: \"795e98c6-49c0-4e5c-8526-85039800ebcc\") " pod="openshift-marketplace/redhat-operators-tvsq5" Sep 30 13:15:02 crc kubenswrapper[5002]: I0930 13:15:02.365927 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-tvsq5" Sep 30 13:15:02 crc kubenswrapper[5002]: I0930 13:15:02.824640 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-tvsq5"] Sep 30 13:15:02 crc kubenswrapper[5002]: I0930 13:15:02.904933 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tvsq5" event={"ID":"795e98c6-49c0-4e5c-8526-85039800ebcc","Type":"ContainerStarted","Data":"555cc9285f02194700659d6005fa2b97385e635997db94d241912a83a015d8c3"} Sep 30 13:15:03 crc kubenswrapper[5002]: I0930 13:15:03.243688 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29320635-5lczz" Sep 30 13:15:03 crc kubenswrapper[5002]: I0930 13:15:03.317572 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/f02ff4ba-79b6-46ed-9a2d-f0e71b4e87e8-secret-volume\") pod \"f02ff4ba-79b6-46ed-9a2d-f0e71b4e87e8\" (UID: \"f02ff4ba-79b6-46ed-9a2d-f0e71b4e87e8\") " Sep 30 13:15:03 crc kubenswrapper[5002]: I0930 13:15:03.317861 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jmtfz\" (UniqueName: \"kubernetes.io/projected/f02ff4ba-79b6-46ed-9a2d-f0e71b4e87e8-kube-api-access-jmtfz\") pod \"f02ff4ba-79b6-46ed-9a2d-f0e71b4e87e8\" (UID: \"f02ff4ba-79b6-46ed-9a2d-f0e71b4e87e8\") " Sep 30 13:15:03 crc kubenswrapper[5002]: I0930 13:15:03.317990 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/f02ff4ba-79b6-46ed-9a2d-f0e71b4e87e8-config-volume\") pod \"f02ff4ba-79b6-46ed-9a2d-f0e71b4e87e8\" (UID: \"f02ff4ba-79b6-46ed-9a2d-f0e71b4e87e8\") " Sep 30 13:15:03 crc kubenswrapper[5002]: I0930 13:15:03.318593 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f02ff4ba-79b6-46ed-9a2d-f0e71b4e87e8-config-volume" (OuterVolumeSpecName: "config-volume") pod "f02ff4ba-79b6-46ed-9a2d-f0e71b4e87e8" (UID: "f02ff4ba-79b6-46ed-9a2d-f0e71b4e87e8"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 13:15:03 crc kubenswrapper[5002]: I0930 13:15:03.324532 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f02ff4ba-79b6-46ed-9a2d-f0e71b4e87e8-kube-api-access-jmtfz" (OuterVolumeSpecName: "kube-api-access-jmtfz") pod "f02ff4ba-79b6-46ed-9a2d-f0e71b4e87e8" (UID: "f02ff4ba-79b6-46ed-9a2d-f0e71b4e87e8"). InnerVolumeSpecName "kube-api-access-jmtfz". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 13:15:03 crc kubenswrapper[5002]: I0930 13:15:03.325815 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f02ff4ba-79b6-46ed-9a2d-f0e71b4e87e8-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "f02ff4ba-79b6-46ed-9a2d-f0e71b4e87e8" (UID: "f02ff4ba-79b6-46ed-9a2d-f0e71b4e87e8"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:15:03 crc kubenswrapper[5002]: I0930 13:15:03.420176 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jmtfz\" (UniqueName: \"kubernetes.io/projected/f02ff4ba-79b6-46ed-9a2d-f0e71b4e87e8-kube-api-access-jmtfz\") on node \"crc\" DevicePath \"\"" Sep 30 13:15:03 crc kubenswrapper[5002]: I0930 13:15:03.420216 5002 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/f02ff4ba-79b6-46ed-9a2d-f0e71b4e87e8-config-volume\") on node \"crc\" DevicePath \"\"" Sep 30 13:15:03 crc kubenswrapper[5002]: I0930 13:15:03.420230 5002 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/f02ff4ba-79b6-46ed-9a2d-f0e71b4e87e8-secret-volume\") on node \"crc\" DevicePath \"\"" Sep 30 13:15:03 crc kubenswrapper[5002]: I0930 13:15:03.921208 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29320635-5lczz" event={"ID":"f02ff4ba-79b6-46ed-9a2d-f0e71b4e87e8","Type":"ContainerDied","Data":"ec4a06852ca5393cf4db8b5dbb6a27177f4fa0190ff6c55a72a033888a74996f"} Sep 30 13:15:03 crc kubenswrapper[5002]: I0930 13:15:03.921593 5002 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ec4a06852ca5393cf4db8b5dbb6a27177f4fa0190ff6c55a72a033888a74996f" Sep 30 13:15:03 crc kubenswrapper[5002]: I0930 13:15:03.921283 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29320635-5lczz" Sep 30 13:15:03 crc kubenswrapper[5002]: I0930 13:15:03.925167 5002 generic.go:334] "Generic (PLEG): container finished" podID="795e98c6-49c0-4e5c-8526-85039800ebcc" containerID="c9a7ffbe67c204489c894dade8b0ea52edd0864302149693e04249049308e99d" exitCode=0 Sep 30 13:15:03 crc kubenswrapper[5002]: I0930 13:15:03.925220 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tvsq5" event={"ID":"795e98c6-49c0-4e5c-8526-85039800ebcc","Type":"ContainerDied","Data":"c9a7ffbe67c204489c894dade8b0ea52edd0864302149693e04249049308e99d"} Sep 30 13:15:03 crc kubenswrapper[5002]: I0930 13:15:03.927143 5002 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Sep 30 13:15:04 crc kubenswrapper[5002]: I0930 13:15:04.321333 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29320590-5z254"] Sep 30 13:15:04 crc kubenswrapper[5002]: I0930 13:15:04.329979 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29320590-5z254"] Sep 30 13:15:04 crc kubenswrapper[5002]: I0930 13:15:04.691112 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e2e6d49f-130e-4161-9ee5-72ad4a0bc56c" path="/var/lib/kubelet/pods/e2e6d49f-130e-4161-9ee5-72ad4a0bc56c/volumes" Sep 30 13:15:04 crc kubenswrapper[5002]: I0930 13:15:04.939739 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tvsq5" event={"ID":"795e98c6-49c0-4e5c-8526-85039800ebcc","Type":"ContainerStarted","Data":"f91ad67e730a45fc8681914f0de14c53fcf7f46d1937720f2d30e65b7867d4fb"} Sep 30 13:15:05 crc kubenswrapper[5002]: I0930 13:15:05.955014 5002 generic.go:334] "Generic (PLEG): container finished" podID="795e98c6-49c0-4e5c-8526-85039800ebcc" 
containerID="f91ad67e730a45fc8681914f0de14c53fcf7f46d1937720f2d30e65b7867d4fb" exitCode=0 Sep 30 13:15:05 crc kubenswrapper[5002]: I0930 13:15:05.955058 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tvsq5" event={"ID":"795e98c6-49c0-4e5c-8526-85039800ebcc","Type":"ContainerDied","Data":"f91ad67e730a45fc8681914f0de14c53fcf7f46d1937720f2d30e65b7867d4fb"} Sep 30 13:15:06 crc kubenswrapper[5002]: I0930 13:15:06.965595 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tvsq5" event={"ID":"795e98c6-49c0-4e5c-8526-85039800ebcc","Type":"ContainerStarted","Data":"c14ebabe779f7ee30d745c2511b9aecab36be3fe938ed719d5ef9886d0b8486b"} Sep 30 13:15:06 crc kubenswrapper[5002]: I0930 13:15:06.986011 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-tvsq5" podStartSLOduration=2.37024343 podStartE2EDuration="4.985993533s" podCreationTimestamp="2025-09-30 13:15:02 +0000 UTC" firstStartedPulling="2025-09-30 13:15:03.926951361 +0000 UTC m=+3278.176633507" lastFinishedPulling="2025-09-30 13:15:06.542701454 +0000 UTC m=+3280.792383610" observedRunningTime="2025-09-30 13:15:06.979996252 +0000 UTC m=+3281.229678468" watchObservedRunningTime="2025-09-30 13:15:06.985993533 +0000 UTC m=+3281.235675679" Sep 30 13:15:12 crc kubenswrapper[5002]: I0930 13:15:12.366517 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-tvsq5" Sep 30 13:15:12 crc kubenswrapper[5002]: I0930 13:15:12.367036 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-tvsq5" Sep 30 13:15:12 crc kubenswrapper[5002]: I0930 13:15:12.410854 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-tvsq5" Sep 30 13:15:13 crc kubenswrapper[5002]: I0930 13:15:13.070167 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-tvsq5" Sep 30 13:15:13 crc kubenswrapper[5002]: I0930 13:15:13.113101 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-tvsq5"] Sep 30 13:15:15 crc kubenswrapper[5002]: I0930 13:15:15.035620 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-tvsq5" podUID="795e98c6-49c0-4e5c-8526-85039800ebcc" containerName="registry-server" containerID="cri-o://c14ebabe779f7ee30d745c2511b9aecab36be3fe938ed719d5ef9886d0b8486b" gracePeriod=2 Sep 30 13:15:15 crc kubenswrapper[5002]: I0930 13:15:15.576431 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-tvsq5" Sep 30 13:15:15 crc kubenswrapper[5002]: I0930 13:15:15.661873 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/795e98c6-49c0-4e5c-8526-85039800ebcc-utilities\") pod \"795e98c6-49c0-4e5c-8526-85039800ebcc\" (UID: \"795e98c6-49c0-4e5c-8526-85039800ebcc\") " Sep 30 13:15:15 crc kubenswrapper[5002]: I0930 13:15:15.661991 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/795e98c6-49c0-4e5c-8526-85039800ebcc-catalog-content\") pod \"795e98c6-49c0-4e5c-8526-85039800ebcc\" (UID: \"795e98c6-49c0-4e5c-8526-85039800ebcc\") " Sep 30 13:15:15 crc kubenswrapper[5002]: I0930 13:15:15.662043 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nqcdh\" (UniqueName: \"kubernetes.io/projected/795e98c6-49c0-4e5c-8526-85039800ebcc-kube-api-access-nqcdh\") pod \"795e98c6-49c0-4e5c-8526-85039800ebcc\" (UID: \"795e98c6-49c0-4e5c-8526-85039800ebcc\") " Sep 30 13:15:15 crc kubenswrapper[5002]: I0930 13:15:15.662917 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/795e98c6-49c0-4e5c-8526-85039800ebcc-utilities" (OuterVolumeSpecName: "utilities") pod "795e98c6-49c0-4e5c-8526-85039800ebcc" (UID: "795e98c6-49c0-4e5c-8526-85039800ebcc"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 13:15:15 crc kubenswrapper[5002]: I0930 13:15:15.667242 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/795e98c6-49c0-4e5c-8526-85039800ebcc-kube-api-access-nqcdh" (OuterVolumeSpecName: "kube-api-access-nqcdh") pod "795e98c6-49c0-4e5c-8526-85039800ebcc" (UID: "795e98c6-49c0-4e5c-8526-85039800ebcc"). InnerVolumeSpecName "kube-api-access-nqcdh". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 13:15:15 crc kubenswrapper[5002]: I0930 13:15:15.748140 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/795e98c6-49c0-4e5c-8526-85039800ebcc-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "795e98c6-49c0-4e5c-8526-85039800ebcc" (UID: "795e98c6-49c0-4e5c-8526-85039800ebcc"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 13:15:15 crc kubenswrapper[5002]: I0930 13:15:15.765049 5002 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/795e98c6-49c0-4e5c-8526-85039800ebcc-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 13:15:15 crc kubenswrapper[5002]: I0930 13:15:15.765077 5002 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/795e98c6-49c0-4e5c-8526-85039800ebcc-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 13:15:15 crc kubenswrapper[5002]: I0930 13:15:15.765088 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nqcdh\" (UniqueName: \"kubernetes.io/projected/795e98c6-49c0-4e5c-8526-85039800ebcc-kube-api-access-nqcdh\") on node \"crc\" DevicePath \"\"" Sep 30 13:15:16 crc kubenswrapper[5002]: I0930 13:15:16.047557 5002 generic.go:334] "Generic (PLEG): container finished" podID="795e98c6-49c0-4e5c-8526-85039800ebcc" containerID="c14ebabe779f7ee30d745c2511b9aecab36be3fe938ed719d5ef9886d0b8486b" exitCode=0 Sep 30 13:15:16 crc kubenswrapper[5002]: I0930 13:15:16.047615 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-tvsq5" Sep 30 13:15:16 crc kubenswrapper[5002]: I0930 13:15:16.047621 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tvsq5" event={"ID":"795e98c6-49c0-4e5c-8526-85039800ebcc","Type":"ContainerDied","Data":"c14ebabe779f7ee30d745c2511b9aecab36be3fe938ed719d5ef9886d0b8486b"} Sep 30 13:15:16 crc kubenswrapper[5002]: I0930 13:15:16.047675 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tvsq5" event={"ID":"795e98c6-49c0-4e5c-8526-85039800ebcc","Type":"ContainerDied","Data":"555cc9285f02194700659d6005fa2b97385e635997db94d241912a83a015d8c3"} Sep 30 13:15:16 crc kubenswrapper[5002]: I0930 13:15:16.047760 5002 scope.go:117] "RemoveContainer" containerID="c14ebabe779f7ee30d745c2511b9aecab36be3fe938ed719d5ef9886d0b8486b" Sep 30 13:15:16 crc kubenswrapper[5002]: I0930 13:15:16.092397 5002 scope.go:117] "RemoveContainer" containerID="f91ad67e730a45fc8681914f0de14c53fcf7f46d1937720f2d30e65b7867d4fb" Sep 30 13:15:16 crc kubenswrapper[5002]: I0930 13:15:16.098418 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-tvsq5"] Sep 30 13:15:16 crc kubenswrapper[5002]: I0930 13:15:16.109176 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-tvsq5"] Sep 30 13:15:16 crc kubenswrapper[5002]: I0930 13:15:16.118268 5002 scope.go:117] "RemoveContainer" containerID="c9a7ffbe67c204489c894dade8b0ea52edd0864302149693e04249049308e99d" Sep 30 13:15:16 crc kubenswrapper[5002]: I0930 13:15:16.180832 5002 scope.go:117] "RemoveContainer" containerID="c14ebabe779f7ee30d745c2511b9aecab36be3fe938ed719d5ef9886d0b8486b" Sep 30 13:15:16 crc kubenswrapper[5002]: E0930 13:15:16.181697 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c14ebabe779f7ee30d745c2511b9aecab36be3fe938ed719d5ef9886d0b8486b\": container with ID starting with c14ebabe779f7ee30d745c2511b9aecab36be3fe938ed719d5ef9886d0b8486b not found: ID does not exist" containerID="c14ebabe779f7ee30d745c2511b9aecab36be3fe938ed719d5ef9886d0b8486b" Sep 30 13:15:16 crc kubenswrapper[5002]: I0930 13:15:16.181761 5002 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c14ebabe779f7ee30d745c2511b9aecab36be3fe938ed719d5ef9886d0b8486b"} err="failed to get container status \"c14ebabe779f7ee30d745c2511b9aecab36be3fe938ed719d5ef9886d0b8486b\": rpc error: code = NotFound desc = could not find container \"c14ebabe779f7ee30d745c2511b9aecab36be3fe938ed719d5ef9886d0b8486b\": container with ID starting with c14ebabe779f7ee30d745c2511b9aecab36be3fe938ed719d5ef9886d0b8486b not found: ID does not exist" Sep 30 13:15:16 crc kubenswrapper[5002]: I0930 13:15:16.181789 5002 scope.go:117] "RemoveContainer" containerID="f91ad67e730a45fc8681914f0de14c53fcf7f46d1937720f2d30e65b7867d4fb" Sep 30 13:15:16 crc kubenswrapper[5002]: E0930 13:15:16.182050 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f91ad67e730a45fc8681914f0de14c53fcf7f46d1937720f2d30e65b7867d4fb\": container with ID starting with f91ad67e730a45fc8681914f0de14c53fcf7f46d1937720f2d30e65b7867d4fb not found: ID does not exist" containerID="f91ad67e730a45fc8681914f0de14c53fcf7f46d1937720f2d30e65b7867d4fb" Sep 30 13:15:16 crc kubenswrapper[5002]: I0930 13:15:16.182079 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f91ad67e730a45fc8681914f0de14c53fcf7f46d1937720f2d30e65b7867d4fb"} err="failed to get container status \"f91ad67e730a45fc8681914f0de14c53fcf7f46d1937720f2d30e65b7867d4fb\": rpc error: code = NotFound desc = could not find container \"f91ad67e730a45fc8681914f0de14c53fcf7f46d1937720f2d30e65b7867d4fb\": container with ID starting with f91ad67e730a45fc8681914f0de14c53fcf7f46d1937720f2d30e65b7867d4fb not found: ID does not exist" Sep 30 13:15:16 crc kubenswrapper[5002]: I0930 13:15:16.182098 5002 scope.go:117] "RemoveContainer" containerID="c9a7ffbe67c204489c894dade8b0ea52edd0864302149693e04249049308e99d" Sep 30 13:15:16 crc kubenswrapper[5002]: E0930 13:15:16.182411 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c9a7ffbe67c204489c894dade8b0ea52edd0864302149693e04249049308e99d\": container with ID starting with c9a7ffbe67c204489c894dade8b0ea52edd0864302149693e04249049308e99d not found: ID does not exist" containerID="c9a7ffbe67c204489c894dade8b0ea52edd0864302149693e04249049308e99d" Sep 30 13:15:16 crc kubenswrapper[5002]: I0930 13:15:16.182497 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c9a7ffbe67c204489c894dade8b0ea52edd0864302149693e04249049308e99d"} err="failed to get container status \"c9a7ffbe67c204489c894dade8b0ea52edd0864302149693e04249049308e99d\": rpc error: code = NotFound desc = could not find container \"c9a7ffbe67c204489c894dade8b0ea52edd0864302149693e04249049308e99d\": container with ID starting with c9a7ffbe67c204489c894dade8b0ea52edd0864302149693e04249049308e99d not found: ID does not exist" Sep 30 13:15:16 crc kubenswrapper[5002]: I0930 13:15:16.693239 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="795e98c6-49c0-4e5c-8526-85039800ebcc" path="/var/lib/kubelet/pods/795e98c6-49c0-4e5c-8526-85039800ebcc/volumes" Sep 30 13:15:32 crc kubenswrapper[5002]: I0930 13:15:32.098998 5002 patch_prober.go:28] interesting pod/machine-config-daemon-ncbb5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 
Sep 30 13:15:32 crc kubenswrapper[5002]: I0930 13:15:32.098998 5002 patch_prober.go:28] interesting pod/machine-config-daemon-ncbb5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Sep 30 13:15:32 crc kubenswrapper[5002]: I0930 13:15:32.099596 5002 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" podUID="341a55c6-78d3-4fa2-8f47-b56fd41fa1c1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Sep 30 13:16:02 crc kubenswrapper[5002]: I0930 13:16:02.098489 5002 patch_prober.go:28] interesting pod/machine-config-daemon-ncbb5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Sep 30 13:16:02 crc kubenswrapper[5002]: I0930 13:16:02.098978 5002 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" podUID="341a55c6-78d3-4fa2-8f47-b56fd41fa1c1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Sep 30 13:16:02 crc kubenswrapper[5002]: I0930 13:16:02.816663 5002 scope.go:117] "RemoveContainer" containerID="b8e2fa48c2bd78b2959ae19ad926ca8b3fda7ab4ae4fc6033ae2f3eab06dfdeb"
Sep 30 13:16:32 crc kubenswrapper[5002]: I0930 13:16:32.098498 5002 patch_prober.go:28] interesting pod/machine-config-daemon-ncbb5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Sep 30 13:16:32 crc kubenswrapper[5002]: I0930 13:16:32.099092 5002 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" podUID="341a55c6-78d3-4fa2-8f47-b56fd41fa1c1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Sep 30 13:16:32 crc kubenswrapper[5002]: I0930 13:16:32.099162 5002 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5"
Sep 30 13:16:32 crc kubenswrapper[5002]: I0930 13:16:32.100025 5002 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"563c0f60a2e42abd33256343ccd18a018715598f4986d0dbab88b973fb61a222"} pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Sep 30 13:16:32 crc kubenswrapper[5002]: I0930 13:16:32.100082 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" podUID="341a55c6-78d3-4fa2-8f47-b56fd41fa1c1" containerName="machine-config-daemon" containerID="cri-o://563c0f60a2e42abd33256343ccd18a018715598f4986d0dbab88b973fb61a222" gracePeriod=600
Sep 30 13:16:32 crc kubenswrapper[5002]: I0930 13:16:32.732027 5002 generic.go:334] "Generic (PLEG): container finished" podID="341a55c6-78d3-4fa2-8f47-b56fd41fa1c1" containerID="563c0f60a2e42abd33256343ccd18a018715598f4986d0dbab88b973fb61a222" exitCode=0
Sep 30 13:16:32 crc kubenswrapper[5002]: I0930 13:16:32.732137 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" event={"ID":"341a55c6-78d3-4fa2-8f47-b56fd41fa1c1","Type":"ContainerDied","Data":"563c0f60a2e42abd33256343ccd18a018715598f4986d0dbab88b973fb61a222"}
Sep 30 13:16:32 crc kubenswrapper[5002]: I0930 13:16:32.732381 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" event={"ID":"341a55c6-78d3-4fa2-8f47-b56fd41fa1c1","Type":"ContainerStarted","Data":"99dfdbc96d28883eef6b198c958bc370158e50fab55ddd68077640f2dc60329b"}
Sep 30 13:16:32 crc kubenswrapper[5002]: I0930 13:16:32.732411 5002 scope.go:117] "RemoveContainer" containerID="1220b4b7f62978ac482c2e64e77e405623ac305ba80fca6277deaf50597c3a98"
Sep 30 13:16:42 crc kubenswrapper[5002]: I0930 13:16:42.895193 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-klqms"]
Sep 30 13:16:42 crc kubenswrapper[5002]: E0930 13:16:42.896233 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="795e98c6-49c0-4e5c-8526-85039800ebcc" containerName="extract-utilities"
Sep 30 13:16:42 crc kubenswrapper[5002]: I0930 13:16:42.896251 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="795e98c6-49c0-4e5c-8526-85039800ebcc" containerName="extract-utilities"
Sep 30 13:16:42 crc kubenswrapper[5002]: E0930 13:16:42.896263 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="795e98c6-49c0-4e5c-8526-85039800ebcc" containerName="registry-server"
Sep 30 13:16:42 crc kubenswrapper[5002]: I0930 13:16:42.896271 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="795e98c6-49c0-4e5c-8526-85039800ebcc" containerName="registry-server"
Sep 30 13:16:42 crc kubenswrapper[5002]: E0930 13:16:42.896291 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="795e98c6-49c0-4e5c-8526-85039800ebcc" containerName="extract-content"
Sep 30 13:16:42 crc kubenswrapper[5002]: I0930 13:16:42.896301 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="795e98c6-49c0-4e5c-8526-85039800ebcc" containerName="extract-content"
Sep 30 13:16:42 crc kubenswrapper[5002]: E0930 13:16:42.896316 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f02ff4ba-79b6-46ed-9a2d-f0e71b4e87e8" containerName="collect-profiles"
Sep 30 13:16:42 crc kubenswrapper[5002]: I0930 13:16:42.896324 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="f02ff4ba-79b6-46ed-9a2d-f0e71b4e87e8" containerName="collect-profiles"
Sep 30 13:16:42 crc kubenswrapper[5002]: I0930 13:16:42.896624 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="f02ff4ba-79b6-46ed-9a2d-f0e71b4e87e8" containerName="collect-profiles"
Sep 30 13:16:42 crc kubenswrapper[5002]: I0930 13:16:42.896657 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="795e98c6-49c0-4e5c-8526-85039800ebcc" containerName="registry-server"
Sep 30 13:16:42 crc kubenswrapper[5002]: I0930 13:16:42.898635 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-klqms"
Sep 30 13:16:42 crc kubenswrapper[5002]: I0930 13:16:42.906168 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-klqms"]
Sep 30 13:16:43 crc kubenswrapper[5002]: I0930 13:16:43.099022 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d5919963-8c5d-43c2-b165-2bf6aa17c4ad-catalog-content\") pod \"certified-operators-klqms\" (UID: \"d5919963-8c5d-43c2-b165-2bf6aa17c4ad\") " pod="openshift-marketplace/certified-operators-klqms"
Sep 30 13:16:43 crc kubenswrapper[5002]: I0930 13:16:43.099817 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-46wr9\" (UniqueName: \"kubernetes.io/projected/d5919963-8c5d-43c2-b165-2bf6aa17c4ad-kube-api-access-46wr9\") pod \"certified-operators-klqms\" (UID: \"d5919963-8c5d-43c2-b165-2bf6aa17c4ad\") " pod="openshift-marketplace/certified-operators-klqms"
Sep 30 13:16:43 crc kubenswrapper[5002]: I0930 13:16:43.099914 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d5919963-8c5d-43c2-b165-2bf6aa17c4ad-utilities\") pod \"certified-operators-klqms\" (UID: \"d5919963-8c5d-43c2-b165-2bf6aa17c4ad\") " pod="openshift-marketplace/certified-operators-klqms"
Sep 30 13:16:43 crc kubenswrapper[5002]: I0930 13:16:43.202124 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-46wr9\" (UniqueName: \"kubernetes.io/projected/d5919963-8c5d-43c2-b165-2bf6aa17c4ad-kube-api-access-46wr9\") pod \"certified-operators-klqms\" (UID: \"d5919963-8c5d-43c2-b165-2bf6aa17c4ad\") " pod="openshift-marketplace/certified-operators-klqms"
Sep 30 13:16:43 crc kubenswrapper[5002]: I0930 13:16:43.202436 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d5919963-8c5d-43c2-b165-2bf6aa17c4ad-utilities\") pod \"certified-operators-klqms\" (UID: \"d5919963-8c5d-43c2-b165-2bf6aa17c4ad\") " pod="openshift-marketplace/certified-operators-klqms"
Sep 30 13:16:43 crc kubenswrapper[5002]: I0930 13:16:43.202566 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d5919963-8c5d-43c2-b165-2bf6aa17c4ad-catalog-content\") pod \"certified-operators-klqms\" (UID: \"d5919963-8c5d-43c2-b165-2bf6aa17c4ad\") " pod="openshift-marketplace/certified-operators-klqms"
Sep 30 13:16:43 crc kubenswrapper[5002]: I0930 13:16:43.203030 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d5919963-8c5d-43c2-b165-2bf6aa17c4ad-utilities\") pod \"certified-operators-klqms\" (UID: \"d5919963-8c5d-43c2-b165-2bf6aa17c4ad\") " pod="openshift-marketplace/certified-operators-klqms"
Sep 30 13:16:43 crc kubenswrapper[5002]: I0930 13:16:43.203099 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d5919963-8c5d-43c2-b165-2bf6aa17c4ad-catalog-content\") pod \"certified-operators-klqms\" (UID: \"d5919963-8c5d-43c2-b165-2bf6aa17c4ad\") " pod="openshift-marketplace/certified-operators-klqms"
Sep 30 13:16:43 crc kubenswrapper[5002]: I0930 13:16:43.227670 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-46wr9\" (UniqueName: \"kubernetes.io/projected/d5919963-8c5d-43c2-b165-2bf6aa17c4ad-kube-api-access-46wr9\") pod \"certified-operators-klqms\" (UID: \"d5919963-8c5d-43c2-b165-2bf6aa17c4ad\") " pod="openshift-marketplace/certified-operators-klqms"
Sep 30 13:16:43 crc kubenswrapper[5002]: I0930 13:16:43.518933 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-klqms"
Sep 30 13:16:43 crc kubenswrapper[5002]: I0930 13:16:43.966886 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-klqms"]
Sep 30 13:16:44 crc kubenswrapper[5002]: I0930 13:16:44.841840 5002 generic.go:334] "Generic (PLEG): container finished" podID="d5919963-8c5d-43c2-b165-2bf6aa17c4ad" containerID="8682528c8dcf33894d6e54920190ae726a56c44757c59fabed96a85fb8d0ea60" exitCode=0
Sep 30 13:16:44 crc kubenswrapper[5002]: I0930 13:16:44.841891 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-klqms" event={"ID":"d5919963-8c5d-43c2-b165-2bf6aa17c4ad","Type":"ContainerDied","Data":"8682528c8dcf33894d6e54920190ae726a56c44757c59fabed96a85fb8d0ea60"}
Sep 30 13:16:44 crc kubenswrapper[5002]: I0930 13:16:44.841922 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-klqms" event={"ID":"d5919963-8c5d-43c2-b165-2bf6aa17c4ad","Type":"ContainerStarted","Data":"0951199184b77a956c5b3c49e6ba422f2873ba28a16f9dca31fa76a6f2f0eb03"}
Sep 30 13:16:45 crc kubenswrapper[5002]: I0930 13:16:45.854967 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-klqms" event={"ID":"d5919963-8c5d-43c2-b165-2bf6aa17c4ad","Type":"ContainerStarted","Data":"5c3c7e498fc1395ac3d1752da2d1f9b08c06e53d8bfaef975c8bf55f6c8f39ef"}
Sep 30 13:16:46 crc kubenswrapper[5002]: I0930 13:16:46.865547 5002 generic.go:334] "Generic (PLEG): container finished" podID="d5919963-8c5d-43c2-b165-2bf6aa17c4ad" containerID="5c3c7e498fc1395ac3d1752da2d1f9b08c06e53d8bfaef975c8bf55f6c8f39ef" exitCode=0
Sep 30 13:16:46 crc kubenswrapper[5002]: I0930 13:16:46.865655 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-klqms" event={"ID":"d5919963-8c5d-43c2-b165-2bf6aa17c4ad","Type":"ContainerDied","Data":"5c3c7e498fc1395ac3d1752da2d1f9b08c06e53d8bfaef975c8bf55f6c8f39ef"}
Sep 30 13:16:47 crc kubenswrapper[5002]: I0930 13:16:47.891778 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-klqms" event={"ID":"d5919963-8c5d-43c2-b165-2bf6aa17c4ad","Type":"ContainerStarted","Data":"9a8fa4fb63ac02bde084a75e378c67000b8d517c7fcfa1a7022eb4abe8c18043"}
Sep 30 13:16:47 crc kubenswrapper[5002]: I0930 13:16:47.917582 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-klqms" podStartSLOduration=3.435073565 podStartE2EDuration="5.917558091s" podCreationTimestamp="2025-09-30 13:16:42 +0000 UTC" firstStartedPulling="2025-09-30 13:16:44.844921594 +0000 UTC m=+3379.094603740" lastFinishedPulling="2025-09-30 13:16:47.32740608 +0000 UTC m=+3381.577088266" observedRunningTime="2025-09-30 13:16:47.911232712 +0000 UTC m=+3382.160914878" watchObservedRunningTime="2025-09-30 13:16:47.917558091 +0000 UTC m=+3382.167240237"
status="" pod="openshift-marketplace/certified-operators-klqms" Sep 30 13:16:53 crc kubenswrapper[5002]: I0930 13:16:53.519850 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-klqms" Sep 30 13:16:53 crc kubenswrapper[5002]: I0930 13:16:53.586920 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-klqms" Sep 30 13:16:54 crc kubenswrapper[5002]: I0930 13:16:54.002653 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-klqms" Sep 30 13:16:54 crc kubenswrapper[5002]: I0930 13:16:54.049865 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-klqms"] Sep 30 13:16:55 crc kubenswrapper[5002]: I0930 13:16:55.965530 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-klqms" podUID="d5919963-8c5d-43c2-b165-2bf6aa17c4ad" containerName="registry-server" containerID="cri-o://9a8fa4fb63ac02bde084a75e378c67000b8d517c7fcfa1a7022eb4abe8c18043" gracePeriod=2 Sep 30 13:16:56 crc kubenswrapper[5002]: I0930 13:16:56.464564 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-klqms" Sep 30 13:16:56 crc kubenswrapper[5002]: I0930 13:16:56.562299 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d5919963-8c5d-43c2-b165-2bf6aa17c4ad-utilities\") pod \"d5919963-8c5d-43c2-b165-2bf6aa17c4ad\" (UID: \"d5919963-8c5d-43c2-b165-2bf6aa17c4ad\") " Sep 30 13:16:56 crc kubenswrapper[5002]: I0930 13:16:56.562379 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d5919963-8c5d-43c2-b165-2bf6aa17c4ad-catalog-content\") pod \"d5919963-8c5d-43c2-b165-2bf6aa17c4ad\" (UID: \"d5919963-8c5d-43c2-b165-2bf6aa17c4ad\") " Sep 30 13:16:56 crc kubenswrapper[5002]: I0930 13:16:56.562464 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-46wr9\" (UniqueName: \"kubernetes.io/projected/d5919963-8c5d-43c2-b165-2bf6aa17c4ad-kube-api-access-46wr9\") pod \"d5919963-8c5d-43c2-b165-2bf6aa17c4ad\" (UID: \"d5919963-8c5d-43c2-b165-2bf6aa17c4ad\") " Sep 30 13:16:56 crc kubenswrapper[5002]: I0930 13:16:56.563245 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d5919963-8c5d-43c2-b165-2bf6aa17c4ad-utilities" (OuterVolumeSpecName: "utilities") pod "d5919963-8c5d-43c2-b165-2bf6aa17c4ad" (UID: "d5919963-8c5d-43c2-b165-2bf6aa17c4ad"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 13:16:56 crc kubenswrapper[5002]: I0930 13:16:56.567851 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d5919963-8c5d-43c2-b165-2bf6aa17c4ad-kube-api-access-46wr9" (OuterVolumeSpecName: "kube-api-access-46wr9") pod "d5919963-8c5d-43c2-b165-2bf6aa17c4ad" (UID: "d5919963-8c5d-43c2-b165-2bf6aa17c4ad"). InnerVolumeSpecName "kube-api-access-46wr9". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 13:16:56 crc kubenswrapper[5002]: I0930 13:16:56.611843 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d5919963-8c5d-43c2-b165-2bf6aa17c4ad-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "d5919963-8c5d-43c2-b165-2bf6aa17c4ad" (UID: "d5919963-8c5d-43c2-b165-2bf6aa17c4ad"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 13:16:56 crc kubenswrapper[5002]: I0930 13:16:56.663874 5002 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d5919963-8c5d-43c2-b165-2bf6aa17c4ad-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 13:16:56 crc kubenswrapper[5002]: I0930 13:16:56.663906 5002 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d5919963-8c5d-43c2-b165-2bf6aa17c4ad-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 13:16:56 crc kubenswrapper[5002]: I0930 13:16:56.663922 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-46wr9\" (UniqueName: \"kubernetes.io/projected/d5919963-8c5d-43c2-b165-2bf6aa17c4ad-kube-api-access-46wr9\") on node \"crc\" DevicePath \"\"" Sep 30 13:16:56 crc kubenswrapper[5002]: I0930 13:16:56.975664 5002 generic.go:334] "Generic (PLEG): container finished" podID="d5919963-8c5d-43c2-b165-2bf6aa17c4ad" containerID="9a8fa4fb63ac02bde084a75e378c67000b8d517c7fcfa1a7022eb4abe8c18043" exitCode=0 Sep 30 13:16:56 crc kubenswrapper[5002]: I0930 13:16:56.975713 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-klqms" event={"ID":"d5919963-8c5d-43c2-b165-2bf6aa17c4ad","Type":"ContainerDied","Data":"9a8fa4fb63ac02bde084a75e378c67000b8d517c7fcfa1a7022eb4abe8c18043"} Sep 30 13:16:56 crc kubenswrapper[5002]: I0930 13:16:56.975734 5002 util.go:48] "No ready sandbox for pod can be found. 
Sep 30 13:16:56 crc kubenswrapper[5002]: I0930 13:16:56.975734 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-klqms"
Sep 30 13:16:56 crc kubenswrapper[5002]: I0930 13:16:56.975756 5002 scope.go:117] "RemoveContainer" containerID="9a8fa4fb63ac02bde084a75e378c67000b8d517c7fcfa1a7022eb4abe8c18043"
Sep 30 13:16:56 crc kubenswrapper[5002]: I0930 13:16:56.975742 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-klqms" event={"ID":"d5919963-8c5d-43c2-b165-2bf6aa17c4ad","Type":"ContainerDied","Data":"0951199184b77a956c5b3c49e6ba422f2873ba28a16f9dca31fa76a6f2f0eb03"}
Sep 30 13:16:56 crc kubenswrapper[5002]: I0930 13:16:56.996964 5002 scope.go:117] "RemoveContainer" containerID="5c3c7e498fc1395ac3d1752da2d1f9b08c06e53d8bfaef975c8bf55f6c8f39ef"
Sep 30 13:16:56 crc kubenswrapper[5002]: I0930 13:16:56.997771 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-klqms"]
Sep 30 13:16:57 crc kubenswrapper[5002]: I0930 13:16:57.007088 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-klqms"]
Sep 30 13:16:57 crc kubenswrapper[5002]: I0930 13:16:57.016442 5002 scope.go:117] "RemoveContainer" containerID="8682528c8dcf33894d6e54920190ae726a56c44757c59fabed96a85fb8d0ea60"
Sep 30 13:16:57 crc kubenswrapper[5002]: I0930 13:16:57.064497 5002 scope.go:117] "RemoveContainer" containerID="9a8fa4fb63ac02bde084a75e378c67000b8d517c7fcfa1a7022eb4abe8c18043"
Sep 30 13:16:57 crc kubenswrapper[5002]: E0930 13:16:57.065075 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9a8fa4fb63ac02bde084a75e378c67000b8d517c7fcfa1a7022eb4abe8c18043\": container with ID starting with 9a8fa4fb63ac02bde084a75e378c67000b8d517c7fcfa1a7022eb4abe8c18043 not found: ID does not exist" containerID="9a8fa4fb63ac02bde084a75e378c67000b8d517c7fcfa1a7022eb4abe8c18043"
Sep 30 13:16:57 crc kubenswrapper[5002]: I0930 13:16:57.065149 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9a8fa4fb63ac02bde084a75e378c67000b8d517c7fcfa1a7022eb4abe8c18043"} err="failed to get container status \"9a8fa4fb63ac02bde084a75e378c67000b8d517c7fcfa1a7022eb4abe8c18043\": rpc error: code = NotFound desc = could not find container \"9a8fa4fb63ac02bde084a75e378c67000b8d517c7fcfa1a7022eb4abe8c18043\": container with ID starting with 9a8fa4fb63ac02bde084a75e378c67000b8d517c7fcfa1a7022eb4abe8c18043 not found: ID does not exist"
Sep 30 13:16:57 crc kubenswrapper[5002]: I0930 13:16:57.065195 5002 scope.go:117] "RemoveContainer" containerID="5c3c7e498fc1395ac3d1752da2d1f9b08c06e53d8bfaef975c8bf55f6c8f39ef"
Sep 30 13:16:57 crc kubenswrapper[5002]: E0930 13:16:57.065637 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5c3c7e498fc1395ac3d1752da2d1f9b08c06e53d8bfaef975c8bf55f6c8f39ef\": container with ID starting with 5c3c7e498fc1395ac3d1752da2d1f9b08c06e53d8bfaef975c8bf55f6c8f39ef not found: ID does not exist" containerID="5c3c7e498fc1395ac3d1752da2d1f9b08c06e53d8bfaef975c8bf55f6c8f39ef"
Sep 30 13:16:57 crc kubenswrapper[5002]: I0930 13:16:57.065670 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5c3c7e498fc1395ac3d1752da2d1f9b08c06e53d8bfaef975c8bf55f6c8f39ef"} err="failed to get container status \"5c3c7e498fc1395ac3d1752da2d1f9b08c06e53d8bfaef975c8bf55f6c8f39ef\": rpc error: code = NotFound desc = could not find container \"5c3c7e498fc1395ac3d1752da2d1f9b08c06e53d8bfaef975c8bf55f6c8f39ef\": container with ID starting with 5c3c7e498fc1395ac3d1752da2d1f9b08c06e53d8bfaef975c8bf55f6c8f39ef not found: ID does not exist"
Sep 30 13:16:57 crc kubenswrapper[5002]: I0930 13:16:57.065692 5002 scope.go:117] "RemoveContainer" containerID="8682528c8dcf33894d6e54920190ae726a56c44757c59fabed96a85fb8d0ea60"
Sep 30 13:16:57 crc kubenswrapper[5002]: E0930 13:16:57.065942 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8682528c8dcf33894d6e54920190ae726a56c44757c59fabed96a85fb8d0ea60\": container with ID starting with 8682528c8dcf33894d6e54920190ae726a56c44757c59fabed96a85fb8d0ea60 not found: ID does not exist" containerID="8682528c8dcf33894d6e54920190ae726a56c44757c59fabed96a85fb8d0ea60"
Sep 30 13:16:57 crc kubenswrapper[5002]: I0930 13:16:57.065976 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8682528c8dcf33894d6e54920190ae726a56c44757c59fabed96a85fb8d0ea60"} err="failed to get container status \"8682528c8dcf33894d6e54920190ae726a56c44757c59fabed96a85fb8d0ea60\": rpc error: code = NotFound desc = could not find container \"8682528c8dcf33894d6e54920190ae726a56c44757c59fabed96a85fb8d0ea60\": container with ID starting with 8682528c8dcf33894d6e54920190ae726a56c44757c59fabed96a85fb8d0ea60 not found: ID does not exist"
Sep 30 13:16:58 crc kubenswrapper[5002]: I0930 13:16:58.686919 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d5919963-8c5d-43c2-b165-2bf6aa17c4ad" path="/var/lib/kubelet/pods/d5919963-8c5d-43c2-b165-2bf6aa17c4ad/volumes"
Sep 30 13:17:03 crc kubenswrapper[5002]: I0930 13:17:03.050406 5002 generic.go:334] "Generic (PLEG): container finished" podID="32bc4a2b-b531-4126-8920-ec50156dc863" containerID="bdd0ec470ba32d08cb4315ac5d534d320832f8a15a8be87b1b855f9a988aa604" exitCode=0
Sep 30 13:17:03 crc kubenswrapper[5002]: I0930 13:17:03.050545 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"32bc4a2b-b531-4126-8920-ec50156dc863","Type":"ContainerDied","Data":"bdd0ec470ba32d08cb4315ac5d534d320832f8a15a8be87b1b855f9a988aa604"}
Sep 30 13:17:04 crc kubenswrapper[5002]: I0930 13:17:04.423806 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/tempest-tests-tempest"
Sep 30 13:17:04 crc kubenswrapper[5002]: I0930 13:17:04.610238 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/32bc4a2b-b531-4126-8920-ec50156dc863-openstack-config\") pod \"32bc4a2b-b531-4126-8920-ec50156dc863\" (UID: \"32bc4a2b-b531-4126-8920-ec50156dc863\") "
Sep 30 13:17:04 crc kubenswrapper[5002]: I0930 13:17:04.610308 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/32bc4a2b-b531-4126-8920-ec50156dc863-ca-certs\") pod \"32bc4a2b-b531-4126-8920-ec50156dc863\" (UID: \"32bc4a2b-b531-4126-8920-ec50156dc863\") "
Sep 30 13:17:04 crc kubenswrapper[5002]: I0930 13:17:04.610374 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/32bc4a2b-b531-4126-8920-ec50156dc863-ssh-key\") pod \"32bc4a2b-b531-4126-8920-ec50156dc863\" (UID: \"32bc4a2b-b531-4126-8920-ec50156dc863\") "
Sep 30 13:17:04 crc kubenswrapper[5002]: I0930 13:17:04.610433 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/32bc4a2b-b531-4126-8920-ec50156dc863-config-data\") pod \"32bc4a2b-b531-4126-8920-ec50156dc863\" (UID: \"32bc4a2b-b531-4126-8920-ec50156dc863\") "
Sep 30 13:17:04 crc kubenswrapper[5002]: I0930 13:17:04.610458 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/32bc4a2b-b531-4126-8920-ec50156dc863-openstack-config-secret\") pod \"32bc4a2b-b531-4126-8920-ec50156dc863\" (UID: \"32bc4a2b-b531-4126-8920-ec50156dc863\") "
Sep 30 13:17:04 crc kubenswrapper[5002]: I0930 13:17:04.610713 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/32bc4a2b-b531-4126-8920-ec50156dc863-test-operator-ephemeral-workdir\") pod \"32bc4a2b-b531-4126-8920-ec50156dc863\" (UID: \"32bc4a2b-b531-4126-8920-ec50156dc863\") "
Sep 30 13:17:04 crc kubenswrapper[5002]: I0930 13:17:04.610760 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dccr4\" (UniqueName: \"kubernetes.io/projected/32bc4a2b-b531-4126-8920-ec50156dc863-kube-api-access-dccr4\") pod \"32bc4a2b-b531-4126-8920-ec50156dc863\" (UID: \"32bc4a2b-b531-4126-8920-ec50156dc863\") "
Sep 30 13:17:04 crc kubenswrapper[5002]: I0930 13:17:04.610794 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/32bc4a2b-b531-4126-8920-ec50156dc863-test-operator-ephemeral-temporary\") pod \"32bc4a2b-b531-4126-8920-ec50156dc863\" (UID: \"32bc4a2b-b531-4126-8920-ec50156dc863\") "
Sep 30 13:17:04 crc kubenswrapper[5002]: I0930 13:17:04.610888 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"test-operator-logs\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"32bc4a2b-b531-4126-8920-ec50156dc863\" (UID: \"32bc4a2b-b531-4126-8920-ec50156dc863\") "
Sep 30 13:17:04 crc kubenswrapper[5002]: I0930 13:17:04.611462 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/32bc4a2b-b531-4126-8920-ec50156dc863-test-operator-ephemeral-temporary" (OuterVolumeSpecName: "test-operator-ephemeral-temporary") pod "32bc4a2b-b531-4126-8920-ec50156dc863" (UID: "32bc4a2b-b531-4126-8920-ec50156dc863"). InnerVolumeSpecName "test-operator-ephemeral-temporary". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 30 13:17:04 crc kubenswrapper[5002]: I0930 13:17:04.612812 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/32bc4a2b-b531-4126-8920-ec50156dc863-config-data" (OuterVolumeSpecName: "config-data") pod "32bc4a2b-b531-4126-8920-ec50156dc863" (UID: "32bc4a2b-b531-4126-8920-ec50156dc863"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 30 13:17:04 crc kubenswrapper[5002]: I0930 13:17:04.616276 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/32bc4a2b-b531-4126-8920-ec50156dc863-test-operator-ephemeral-workdir" (OuterVolumeSpecName: "test-operator-ephemeral-workdir") pod "32bc4a2b-b531-4126-8920-ec50156dc863" (UID: "32bc4a2b-b531-4126-8920-ec50156dc863"). InnerVolumeSpecName "test-operator-ephemeral-workdir". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 30 13:17:04 crc kubenswrapper[5002]: I0930 13:17:04.616569 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/32bc4a2b-b531-4126-8920-ec50156dc863-kube-api-access-dccr4" (OuterVolumeSpecName: "kube-api-access-dccr4") pod "32bc4a2b-b531-4126-8920-ec50156dc863" (UID: "32bc4a2b-b531-4126-8920-ec50156dc863"). InnerVolumeSpecName "kube-api-access-dccr4". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 13:17:04 crc kubenswrapper[5002]: I0930 13:17:04.617530 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage09-crc" (OuterVolumeSpecName: "test-operator-logs") pod "32bc4a2b-b531-4126-8920-ec50156dc863" (UID: "32bc4a2b-b531-4126-8920-ec50156dc863"). InnerVolumeSpecName "local-storage09-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue ""
Sep 30 13:17:04 crc kubenswrapper[5002]: I0930 13:17:04.644271 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/32bc4a2b-b531-4126-8920-ec50156dc863-openstack-config-secret" (OuterVolumeSpecName: "openstack-config-secret") pod "32bc4a2b-b531-4126-8920-ec50156dc863" (UID: "32bc4a2b-b531-4126-8920-ec50156dc863"). InnerVolumeSpecName "openstack-config-secret". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 13:17:04 crc kubenswrapper[5002]: I0930 13:17:04.649908 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/32bc4a2b-b531-4126-8920-ec50156dc863-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "32bc4a2b-b531-4126-8920-ec50156dc863" (UID: "32bc4a2b-b531-4126-8920-ec50156dc863"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 13:17:04 crc kubenswrapper[5002]: I0930 13:17:04.654325 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/32bc4a2b-b531-4126-8920-ec50156dc863-ca-certs" (OuterVolumeSpecName: "ca-certs") pod "32bc4a2b-b531-4126-8920-ec50156dc863" (UID: "32bc4a2b-b531-4126-8920-ec50156dc863"). InnerVolumeSpecName "ca-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 13:17:04 crc kubenswrapper[5002]: I0930 13:17:04.667515 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/32bc4a2b-b531-4126-8920-ec50156dc863-openstack-config" (OuterVolumeSpecName: "openstack-config") pod "32bc4a2b-b531-4126-8920-ec50156dc863" (UID: "32bc4a2b-b531-4126-8920-ec50156dc863"). InnerVolumeSpecName "openstack-config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 30 13:17:04 crc kubenswrapper[5002]: I0930 13:17:04.713060 5002 reconciler_common.go:293] "Volume detached for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/32bc4a2b-b531-4126-8920-ec50156dc863-test-operator-ephemeral-workdir\") on node \"crc\" DevicePath \"\""
Sep 30 13:17:04 crc kubenswrapper[5002]: I0930 13:17:04.713105 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dccr4\" (UniqueName: \"kubernetes.io/projected/32bc4a2b-b531-4126-8920-ec50156dc863-kube-api-access-dccr4\") on node \"crc\" DevicePath \"\""
Sep 30 13:17:04 crc kubenswrapper[5002]: I0930 13:17:04.713138 5002 reconciler_common.go:293] "Volume detached for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/32bc4a2b-b531-4126-8920-ec50156dc863-test-operator-ephemeral-temporary\") on node \"crc\" DevicePath \"\""
Sep 30 13:17:04 crc kubenswrapper[5002]: I0930 13:17:04.713181 5002 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") on node \"crc\" "
Sep 30 13:17:04 crc kubenswrapper[5002]: I0930 13:17:04.713216 5002 reconciler_common.go:293] "Volume detached for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/32bc4a2b-b531-4126-8920-ec50156dc863-ca-certs\") on node \"crc\" DevicePath \"\""
Sep 30 13:17:04 crc kubenswrapper[5002]: I0930 13:17:04.713227 5002 reconciler_common.go:293] "Volume detached for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/32bc4a2b-b531-4126-8920-ec50156dc863-openstack-config\") on node \"crc\" DevicePath \"\""
Sep 30 13:17:04 crc kubenswrapper[5002]: I0930 13:17:04.713238 5002 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/32bc4a2b-b531-4126-8920-ec50156dc863-ssh-key\") on node \"crc\" DevicePath \"\""
Sep 30 13:17:04 crc kubenswrapper[5002]: I0930 13:17:04.713247 5002 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/32bc4a2b-b531-4126-8920-ec50156dc863-config-data\") on node \"crc\" DevicePath \"\""
Sep 30 13:17:04 crc kubenswrapper[5002]: I0930 13:17:04.713256 5002 reconciler_common.go:293] "Volume detached for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/32bc4a2b-b531-4126-8920-ec50156dc863-openstack-config-secret\") on node \"crc\" DevicePath \"\""
Sep 30 13:17:04 crc kubenswrapper[5002]: I0930 13:17:04.735945 5002 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage09-crc" (UniqueName: "kubernetes.io/local-volume/local-storage09-crc") on node "crc"
Sep 30 13:17:04 crc kubenswrapper[5002]: I0930 13:17:04.815189 5002 reconciler_common.go:293] "Volume detached for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") on node \"crc\" DevicePath \"\""
pod" pod="openstack/tempest-tests-tempest" event={"ID":"32bc4a2b-b531-4126-8920-ec50156dc863","Type":"ContainerDied","Data":"dd342fbb4d502066a3601f484d61c6b2664a704c1566a169f763bbae6df328d4"} Sep 30 13:17:05 crc kubenswrapper[5002]: I0930 13:17:05.073204 5002 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="dd342fbb4d502066a3601f484d61c6b2664a704c1566a169f763bbae6df328d4" Sep 30 13:17:05 crc kubenswrapper[5002]: I0930 13:17:05.073236 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/tempest-tests-tempest" Sep 30 13:17:10 crc kubenswrapper[5002]: I0930 13:17:10.563099 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"] Sep 30 13:17:10 crc kubenswrapper[5002]: E0930 13:17:10.564218 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d5919963-8c5d-43c2-b165-2bf6aa17c4ad" containerName="registry-server" Sep 30 13:17:10 crc kubenswrapper[5002]: I0930 13:17:10.564234 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="d5919963-8c5d-43c2-b165-2bf6aa17c4ad" containerName="registry-server" Sep 30 13:17:10 crc kubenswrapper[5002]: E0930 13:17:10.564254 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="32bc4a2b-b531-4126-8920-ec50156dc863" containerName="tempest-tests-tempest-tests-runner" Sep 30 13:17:10 crc kubenswrapper[5002]: I0930 13:17:10.564261 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="32bc4a2b-b531-4126-8920-ec50156dc863" containerName="tempest-tests-tempest-tests-runner" Sep 30 13:17:10 crc kubenswrapper[5002]: E0930 13:17:10.564270 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d5919963-8c5d-43c2-b165-2bf6aa17c4ad" containerName="extract-utilities" Sep 30 13:17:10 crc kubenswrapper[5002]: I0930 13:17:10.564278 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="d5919963-8c5d-43c2-b165-2bf6aa17c4ad" containerName="extract-utilities" Sep 30 13:17:10 crc kubenswrapper[5002]: E0930 13:17:10.564310 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d5919963-8c5d-43c2-b165-2bf6aa17c4ad" containerName="extract-content" Sep 30 13:17:10 crc kubenswrapper[5002]: I0930 13:17:10.564317 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="d5919963-8c5d-43c2-b165-2bf6aa17c4ad" containerName="extract-content" Sep 30 13:17:10 crc kubenswrapper[5002]: I0930 13:17:10.564570 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="32bc4a2b-b531-4126-8920-ec50156dc863" containerName="tempest-tests-tempest-tests-runner" Sep 30 13:17:10 crc kubenswrapper[5002]: I0930 13:17:10.564596 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="d5919963-8c5d-43c2-b165-2bf6aa17c4ad" containerName="registry-server" Sep 30 13:17:10 crc kubenswrapper[5002]: I0930 13:17:10.565187 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Sep 30 13:17:10 crc kubenswrapper[5002]: I0930 13:17:10.568146 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"default-dockercfg-xltdh" Sep 30 13:17:10 crc kubenswrapper[5002]: I0930 13:17:10.571497 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"] Sep 30 13:17:10 crc kubenswrapper[5002]: I0930 13:17:10.729743 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9kcl8\" (UniqueName: \"kubernetes.io/projected/50ed8d15-6cc0-4e0f-9116-35c93f45d6d4-kube-api-access-9kcl8\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"50ed8d15-6cc0-4e0f-9116-35c93f45d6d4\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Sep 30 13:17:10 crc kubenswrapper[5002]: I0930 13:17:10.730095 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"50ed8d15-6cc0-4e0f-9116-35c93f45d6d4\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Sep 30 13:17:10 crc kubenswrapper[5002]: I0930 13:17:10.832383 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9kcl8\" (UniqueName: \"kubernetes.io/projected/50ed8d15-6cc0-4e0f-9116-35c93f45d6d4-kube-api-access-9kcl8\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"50ed8d15-6cc0-4e0f-9116-35c93f45d6d4\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Sep 30 13:17:10 crc kubenswrapper[5002]: I0930 13:17:10.832587 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"50ed8d15-6cc0-4e0f-9116-35c93f45d6d4\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Sep 30 13:17:10 crc kubenswrapper[5002]: I0930 13:17:10.833507 5002 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"50ed8d15-6cc0-4e0f-9116-35c93f45d6d4\") device mount path \"/mnt/openstack/pv09\"" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Sep 30 13:17:10 crc kubenswrapper[5002]: I0930 13:17:10.864678 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9kcl8\" (UniqueName: \"kubernetes.io/projected/50ed8d15-6cc0-4e0f-9116-35c93f45d6d4-kube-api-access-9kcl8\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"50ed8d15-6cc0-4e0f-9116-35c93f45d6d4\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Sep 30 13:17:10 crc kubenswrapper[5002]: I0930 13:17:10.870425 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"50ed8d15-6cc0-4e0f-9116-35c93f45d6d4\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Sep 30 13:17:10 crc 
kubenswrapper[5002]: I0930 13:17:10.890443 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Sep 30 13:17:11 crc kubenswrapper[5002]: I0930 13:17:11.334361 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"] Sep 30 13:17:12 crc kubenswrapper[5002]: I0930 13:17:12.154408 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" event={"ID":"50ed8d15-6cc0-4e0f-9116-35c93f45d6d4","Type":"ContainerStarted","Data":"a103c06d57e21dcf0359208302d7019a86387fcaf7d36975e6a5a12b1944c844"} Sep 30 13:17:13 crc kubenswrapper[5002]: I0930 13:17:13.166648 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" event={"ID":"50ed8d15-6cc0-4e0f-9116-35c93f45d6d4","Type":"ContainerStarted","Data":"679d5214d71b25f41883370548870720c311453b3f94d897f26d7d1454bf7c65"} Sep 30 13:17:13 crc kubenswrapper[5002]: I0930 13:17:13.184931 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" podStartSLOduration=2.344368418 podStartE2EDuration="3.184912208s" podCreationTimestamp="2025-09-30 13:17:10 +0000 UTC" firstStartedPulling="2025-09-30 13:17:11.339282009 +0000 UTC m=+3405.588964155" lastFinishedPulling="2025-09-30 13:17:12.179825799 +0000 UTC m=+3406.429507945" observedRunningTime="2025-09-30 13:17:13.181700053 +0000 UTC m=+3407.431382219" watchObservedRunningTime="2025-09-30 13:17:13.184912208 +0000 UTC m=+3407.434594354" Sep 30 13:17:30 crc kubenswrapper[5002]: I0930 13:17:30.618492 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-4ppnm/must-gather-f26t8"] Sep 30 13:17:30 crc kubenswrapper[5002]: I0930 13:17:30.620608 5002 util.go:30] "No sandbox for pod can be found. 
Sep 30 13:17:30 crc kubenswrapper[5002]: I0930 13:17:30.620608 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-4ppnm/must-gather-f26t8"
Sep 30 13:17:30 crc kubenswrapper[5002]: I0930 13:17:30.623113 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-4ppnm"/"default-dockercfg-66tmh"
Sep 30 13:17:30 crc kubenswrapper[5002]: I0930 13:17:30.623209 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-4ppnm"/"kube-root-ca.crt"
Sep 30 13:17:30 crc kubenswrapper[5002]: I0930 13:17:30.623408 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-4ppnm"/"openshift-service-ca.crt"
Sep 30 13:17:30 crc kubenswrapper[5002]: I0930 13:17:30.632486 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-4ppnm/must-gather-f26t8"]
Sep 30 13:17:30 crc kubenswrapper[5002]: I0930 13:17:30.720016 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/27d1ca3b-9ebd-4455-972d-11f395adb2c7-must-gather-output\") pod \"must-gather-f26t8\" (UID: \"27d1ca3b-9ebd-4455-972d-11f395adb2c7\") " pod="openshift-must-gather-4ppnm/must-gather-f26t8"
Sep 30 13:17:30 crc kubenswrapper[5002]: I0930 13:17:30.720115 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zxln5\" (UniqueName: \"kubernetes.io/projected/27d1ca3b-9ebd-4455-972d-11f395adb2c7-kube-api-access-zxln5\") pod \"must-gather-f26t8\" (UID: \"27d1ca3b-9ebd-4455-972d-11f395adb2c7\") " pod="openshift-must-gather-4ppnm/must-gather-f26t8"
Sep 30 13:17:30 crc kubenswrapper[5002]: I0930 13:17:30.821542 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zxln5\" (UniqueName: \"kubernetes.io/projected/27d1ca3b-9ebd-4455-972d-11f395adb2c7-kube-api-access-zxln5\") pod \"must-gather-f26t8\" (UID: \"27d1ca3b-9ebd-4455-972d-11f395adb2c7\") " pod="openshift-must-gather-4ppnm/must-gather-f26t8"
Sep 30 13:17:30 crc kubenswrapper[5002]: I0930 13:17:30.821706 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/27d1ca3b-9ebd-4455-972d-11f395adb2c7-must-gather-output\") pod \"must-gather-f26t8\" (UID: \"27d1ca3b-9ebd-4455-972d-11f395adb2c7\") " pod="openshift-must-gather-4ppnm/must-gather-f26t8"
Sep 30 13:17:30 crc kubenswrapper[5002]: I0930 13:17:30.822412 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/27d1ca3b-9ebd-4455-972d-11f395adb2c7-must-gather-output\") pod \"must-gather-f26t8\" (UID: \"27d1ca3b-9ebd-4455-972d-11f395adb2c7\") " pod="openshift-must-gather-4ppnm/must-gather-f26t8"
Sep 30 13:17:30 crc kubenswrapper[5002]: I0930 13:17:30.840878 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zxln5\" (UniqueName: \"kubernetes.io/projected/27d1ca3b-9ebd-4455-972d-11f395adb2c7-kube-api-access-zxln5\") pod \"must-gather-f26t8\" (UID: \"27d1ca3b-9ebd-4455-972d-11f395adb2c7\") " pod="openshift-must-gather-4ppnm/must-gather-f26t8"
Sep 30 13:17:30 crc kubenswrapper[5002]: I0930 13:17:30.944211 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-4ppnm/must-gather-f26t8"
Sep 30 13:17:31 crc kubenswrapper[5002]: I0930 13:17:31.389909 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-4ppnm/must-gather-f26t8"]
Sep 30 13:17:32 crc kubenswrapper[5002]: I0930 13:17:32.333599 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-4ppnm/must-gather-f26t8" event={"ID":"27d1ca3b-9ebd-4455-972d-11f395adb2c7","Type":"ContainerStarted","Data":"5f57178b55f3f3fe88193379309f87cd254d887fae78ed4277523d20006bf755"}
Sep 30 13:17:36 crc kubenswrapper[5002]: I0930 13:17:36.373341 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-4ppnm/must-gather-f26t8" event={"ID":"27d1ca3b-9ebd-4455-972d-11f395adb2c7","Type":"ContainerStarted","Data":"ba809f0e6cb72d539b4eb13abc5edc183dfb9f0701c0f704e748bcdff41085e6"}
Sep 30 13:17:36 crc kubenswrapper[5002]: I0930 13:17:36.373776 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-4ppnm/must-gather-f26t8" event={"ID":"27d1ca3b-9ebd-4455-972d-11f395adb2c7","Type":"ContainerStarted","Data":"901e8c96f367fde5db58e1d9a45ae121840574075d6496bbf8f8dd0cf9e9f4ce"}
Sep 30 13:17:39 crc kubenswrapper[5002]: I0930 13:17:39.091757 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-4ppnm/must-gather-f26t8" podStartSLOduration=5.266840043 podStartE2EDuration="9.091723607s" podCreationTimestamp="2025-09-30 13:17:30 +0000 UTC" firstStartedPulling="2025-09-30 13:17:31.395603049 +0000 UTC m=+3425.645285195" lastFinishedPulling="2025-09-30 13:17:35.220486593 +0000 UTC m=+3429.470168759" observedRunningTime="2025-09-30 13:17:36.390752994 +0000 UTC m=+3430.640435140" watchObservedRunningTime="2025-09-30 13:17:39.091723607 +0000 UTC m=+3433.341405753"
Sep 30 13:17:39 crc kubenswrapper[5002]: I0930 13:17:39.098935 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-4ppnm/crc-debug-5v4xh"]
Sep 30 13:17:39 crc kubenswrapper[5002]: I0930 13:17:39.100789 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-4ppnm/crc-debug-5v4xh"
Sep 30 13:17:39 crc kubenswrapper[5002]: I0930 13:17:39.275607 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/d3362723-ac17-40a0-b304-77476e1a80b8-host\") pod \"crc-debug-5v4xh\" (UID: \"d3362723-ac17-40a0-b304-77476e1a80b8\") " pod="openshift-must-gather-4ppnm/crc-debug-5v4xh"
Sep 30 13:17:39 crc kubenswrapper[5002]: I0930 13:17:39.276345 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m7lb9\" (UniqueName: \"kubernetes.io/projected/d3362723-ac17-40a0-b304-77476e1a80b8-kube-api-access-m7lb9\") pod \"crc-debug-5v4xh\" (UID: \"d3362723-ac17-40a0-b304-77476e1a80b8\") " pod="openshift-must-gather-4ppnm/crc-debug-5v4xh"
Sep 30 13:17:39 crc kubenswrapper[5002]: I0930 13:17:39.378866 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m7lb9\" (UniqueName: \"kubernetes.io/projected/d3362723-ac17-40a0-b304-77476e1a80b8-kube-api-access-m7lb9\") pod \"crc-debug-5v4xh\" (UID: \"d3362723-ac17-40a0-b304-77476e1a80b8\") " pod="openshift-must-gather-4ppnm/crc-debug-5v4xh"
Sep 30 13:17:39 crc kubenswrapper[5002]: I0930 13:17:39.378989 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/d3362723-ac17-40a0-b304-77476e1a80b8-host\") pod \"crc-debug-5v4xh\" (UID: \"d3362723-ac17-40a0-b304-77476e1a80b8\") " pod="openshift-must-gather-4ppnm/crc-debug-5v4xh"
Sep 30 13:17:39 crc kubenswrapper[5002]: I0930 13:17:39.379100 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/d3362723-ac17-40a0-b304-77476e1a80b8-host\") pod \"crc-debug-5v4xh\" (UID: \"d3362723-ac17-40a0-b304-77476e1a80b8\") " pod="openshift-must-gather-4ppnm/crc-debug-5v4xh"
Sep 30 13:17:39 crc kubenswrapper[5002]: I0930 13:17:39.398702 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m7lb9\" (UniqueName: \"kubernetes.io/projected/d3362723-ac17-40a0-b304-77476e1a80b8-kube-api-access-m7lb9\") pod \"crc-debug-5v4xh\" (UID: \"d3362723-ac17-40a0-b304-77476e1a80b8\") " pod="openshift-must-gather-4ppnm/crc-debug-5v4xh"
Sep 30 13:17:39 crc kubenswrapper[5002]: I0930 13:17:39.422784 5002 util.go:30] "No sandbox for pod can be found.
Need to start a new one" pod="openshift-must-gather-4ppnm/crc-debug-5v4xh" Sep 30 13:17:39 crc kubenswrapper[5002]: W0930 13:17:39.455055 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd3362723_ac17_40a0_b304_77476e1a80b8.slice/crio-faaeaf3e1790bdc3db4f9e664144500fd2218412ac9b07df8e5c3244493e43ce WatchSource:0}: Error finding container faaeaf3e1790bdc3db4f9e664144500fd2218412ac9b07df8e5c3244493e43ce: Status 404 returned error can't find the container with id faaeaf3e1790bdc3db4f9e664144500fd2218412ac9b07df8e5c3244493e43ce Sep 30 13:17:40 crc kubenswrapper[5002]: I0930 13:17:40.422196 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-4ppnm/crc-debug-5v4xh" event={"ID":"d3362723-ac17-40a0-b304-77476e1a80b8","Type":"ContainerStarted","Data":"faaeaf3e1790bdc3db4f9e664144500fd2218412ac9b07df8e5c3244493e43ce"} Sep 30 13:17:51 crc kubenswrapper[5002]: I0930 13:17:51.532062 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-4ppnm/crc-debug-5v4xh" event={"ID":"d3362723-ac17-40a0-b304-77476e1a80b8","Type":"ContainerStarted","Data":"b27c37db4f5c063bfaadcef119c5af3b321cd4287f504e684cdef59724eb7c97"} Sep 30 13:17:51 crc kubenswrapper[5002]: I0930 13:17:51.558585 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-4ppnm/crc-debug-5v4xh" podStartSLOduration=1.7606269719999998 podStartE2EDuration="12.558555842s" podCreationTimestamp="2025-09-30 13:17:39 +0000 UTC" firstStartedPulling="2025-09-30 13:17:39.457603662 +0000 UTC m=+3433.707285808" lastFinishedPulling="2025-09-30 13:17:50.255532532 +0000 UTC m=+3444.505214678" observedRunningTime="2025-09-30 13:17:51.546048398 +0000 UTC m=+3445.795730584" watchObservedRunningTime="2025-09-30 13:17:51.558555842 +0000 UTC m=+3445.808238008" Sep 30 13:18:32 crc kubenswrapper[5002]: I0930 13:18:32.097944 5002 patch_prober.go:28] interesting pod/machine-config-daemon-ncbb5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 13:18:32 crc kubenswrapper[5002]: I0930 13:18:32.098399 5002 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" podUID="341a55c6-78d3-4fa2-8f47-b56fd41fa1c1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 13:18:37 crc kubenswrapper[5002]: I0930 13:18:37.617962 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-858c4cb9d6-g5ts6_ed88e034-9e24-4611-9d19-90530ff3f7b1/barbican-api-log/0.log" Sep 30 13:18:37 crc kubenswrapper[5002]: I0930 13:18:37.654507 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-858c4cb9d6-g5ts6_ed88e034-9e24-4611-9d19-90530ff3f7b1/barbican-api/0.log" Sep 30 13:18:37 crc kubenswrapper[5002]: I0930 13:18:37.851858 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-64bf5676fd-f8zrr_fc432228-1ae9-4a76-a81f-a8a7d2d44492/barbican-keystone-listener/0.log" Sep 30 13:18:37 crc kubenswrapper[5002]: I0930 13:18:37.984232 5002 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_barbican-keystone-listener-64bf5676fd-f8zrr_fc432228-1ae9-4a76-a81f-a8a7d2d44492/barbican-keystone-listener-log/0.log" Sep 30 13:18:38 crc kubenswrapper[5002]: I0930 13:18:38.048517 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-5976c988bc-j7gh5_295327d8-7973-4826-a8ab-34dcf2f4b5d5/barbican-worker/0.log" Sep 30 13:18:38 crc kubenswrapper[5002]: I0930 13:18:38.218733 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-5976c988bc-j7gh5_295327d8-7973-4826-a8ab-34dcf2f4b5d5/barbican-worker-log/0.log" Sep 30 13:18:38 crc kubenswrapper[5002]: I0930 13:18:38.411950 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_bootstrap-edpm-deployment-openstack-edpm-ipam-xzr9z_016126fa-8541-4424-b217-acf7d88e5680/bootstrap-edpm-deployment-openstack-edpm-ipam/0.log" Sep 30 13:18:38 crc kubenswrapper[5002]: I0930 13:18:38.622064 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_85c8795c-0a51-4d53-a20d-b0b96d217d93/ceilometer-central-agent/0.log" Sep 30 13:18:38 crc kubenswrapper[5002]: I0930 13:18:38.669295 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_85c8795c-0a51-4d53-a20d-b0b96d217d93/proxy-httpd/0.log" Sep 30 13:18:38 crc kubenswrapper[5002]: I0930 13:18:38.792406 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_85c8795c-0a51-4d53-a20d-b0b96d217d93/ceilometer-notification-agent/0.log" Sep 30 13:18:38 crc kubenswrapper[5002]: I0930 13:18:38.800807 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_85c8795c-0a51-4d53-a20d-b0b96d217d93/sg-core/0.log" Sep 30 13:18:39 crc kubenswrapper[5002]: I0930 13:18:39.023554 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_f71d6351-0ed1-4047-bffa-cc1020e38ecf/cinder-api-log/0.log" Sep 30 13:18:39 crc kubenswrapper[5002]: I0930 13:18:39.050880 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_f71d6351-0ed1-4047-bffa-cc1020e38ecf/cinder-api/0.log" Sep 30 13:18:39 crc kubenswrapper[5002]: I0930 13:18:39.247359 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_9504c15f-ff11-4255-9371-b0481f692c0b/cinder-scheduler/0.log" Sep 30 13:18:39 crc kubenswrapper[5002]: I0930 13:18:39.272927 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_9504c15f-ff11-4255-9371-b0481f692c0b/probe/0.log" Sep 30 13:18:39 crc kubenswrapper[5002]: I0930 13:18:39.436514 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-network-edpm-deployment-openstack-edpm-ipam-p69m6_0b171fad-8a7f-4271-b1e6-43b03111244d/configure-network-edpm-deployment-openstack-edpm-ipam/0.log" Sep 30 13:18:39 crc kubenswrapper[5002]: I0930 13:18:39.622748 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-os-edpm-deployment-openstack-edpm-ipam-gwfhm_36ca4049-6444-4a67-b607-b15095a3dabf/configure-os-edpm-deployment-openstack-edpm-ipam/0.log" Sep 30 13:18:39 crc kubenswrapper[5002]: I0930 13:18:39.744351 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-os-edpm-deployment-openstack-edpm-ipam-tp5lv_f4f8c21f-af18-49de-8a07-140d16e9785f/configure-os-edpm-deployment-openstack-edpm-ipam/0.log" Sep 30 13:18:39 crc kubenswrapper[5002]: I0930 13:18:39.870611 5002 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_dnsmasq-dns-cb6ffcf87-kzgd6_8ef8ec7f-169f-494e-b17d-4206c144d4f3/init/0.log" Sep 30 13:18:40 crc kubenswrapper[5002]: I0930 13:18:40.019233 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-cb6ffcf87-kzgd6_8ef8ec7f-169f-494e-b17d-4206c144d4f3/init/0.log" Sep 30 13:18:40 crc kubenswrapper[5002]: I0930 13:18:40.085604 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-cb6ffcf87-kzgd6_8ef8ec7f-169f-494e-b17d-4206c144d4f3/dnsmasq-dns/0.log" Sep 30 13:18:40 crc kubenswrapper[5002]: I0930 13:18:40.244256 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_download-cache-edpm-deployment-openstack-edpm-ipam-lpdr2_5389ead8-63bd-4216-9653-48519fa391fb/download-cache-edpm-deployment-openstack-edpm-ipam/0.log" Sep 30 13:18:40 crc kubenswrapper[5002]: I0930 13:18:40.333355 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_b5a818b5-30cc-4c21-b7c4-7563b49832eb/glance-httpd/0.log" Sep 30 13:18:40 crc kubenswrapper[5002]: I0930 13:18:40.449813 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_b5a818b5-30cc-4c21-b7c4-7563b49832eb/glance-log/0.log" Sep 30 13:18:40 crc kubenswrapper[5002]: I0930 13:18:40.572708 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_17156d80-5354-430c-a7f9-294bae55a11c/glance-httpd/0.log" Sep 30 13:18:40 crc kubenswrapper[5002]: I0930 13:18:40.776033 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_17156d80-5354-430c-a7f9-294bae55a11c/glance-log/0.log" Sep 30 13:18:40 crc kubenswrapper[5002]: I0930 13:18:40.999380 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-66c799f4f6-jprmr_7c12a4dd-a3df-4106-ab48-b628b89b3277/horizon/0.log" Sep 30 13:18:41 crc kubenswrapper[5002]: I0930 13:18:41.091865 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-certs-edpm-deployment-openstack-edpm-ipam-7zdzw_61c70258-1787-4522-810a-af2ac9e07703/install-certs-edpm-deployment-openstack-edpm-ipam/0.log" Sep 30 13:18:41 crc kubenswrapper[5002]: I0930 13:18:41.224335 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-66c799f4f6-jprmr_7c12a4dd-a3df-4106-ab48-b628b89b3277/horizon-log/0.log" Sep 30 13:18:41 crc kubenswrapper[5002]: I0930 13:18:41.263357 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-os-edpm-deployment-openstack-edpm-ipam-dpzsf_8ebc7091-7fed-4943-9bff-8d1d9ab3db90/install-os-edpm-deployment-openstack-edpm-ipam/0.log" Sep 30 13:18:41 crc kubenswrapper[5002]: I0930 13:18:41.499376 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-cron-29320621-l2g2n_321c3fc2-62d9-46a2-99b3-d1bc4e7e534c/keystone-cron/0.log" Sep 30 13:18:41 crc kubenswrapper[5002]: I0930 13:18:41.595517 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-84489c98f8-p85zv_1c943245-6e36-4812-9694-48a5c2747a90/keystone-api/0.log" Sep 30 13:18:41 crc kubenswrapper[5002]: I0930 13:18:41.696286 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_kube-state-metrics-0_16e6726b-1f13-4bd9-a6a0-326e726dd86a/kube-state-metrics/0.log" Sep 30 13:18:41 crc kubenswrapper[5002]: I0930 13:18:41.788378 5002 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_libvirt-edpm-deployment-openstack-edpm-ipam-lzd9v_7a8b7e27-6872-47a1-b564-9a288ac7cef0/libvirt-edpm-deployment-openstack-edpm-ipam/0.log" Sep 30 13:18:42 crc kubenswrapper[5002]: I0930 13:18:42.119544 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-5c946cd5ff-n7x9t_1df12b89-f778-45b2-b39a-c95700262b6e/neutron-httpd/0.log" Sep 30 13:18:42 crc kubenswrapper[5002]: I0930 13:18:42.202015 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-5c946cd5ff-n7x9t_1df12b89-f778-45b2-b39a-c95700262b6e/neutron-api/0.log" Sep 30 13:18:42 crc kubenswrapper[5002]: I0930 13:18:42.370365 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-metadata-edpm-deployment-openstack-edpm-ipam-tzptr_afe17fe9-74c6-442a-a2c3-70958d7a706b/neutron-metadata-edpm-deployment-openstack-edpm-ipam/0.log" Sep 30 13:18:43 crc kubenswrapper[5002]: I0930 13:18:43.098913 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_822ff6cf-9987-40ed-929e-615b255cc532/nova-api-log/0.log" Sep 30 13:18:43 crc kubenswrapper[5002]: I0930 13:18:43.338048 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell0-conductor-0_972b29bd-a22c-486e-ac29-6d075c3b26a7/nova-cell0-conductor-conductor/0.log" Sep 30 13:18:43 crc kubenswrapper[5002]: I0930 13:18:43.398281 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_822ff6cf-9987-40ed-929e-615b255cc532/nova-api-api/0.log" Sep 30 13:18:43 crc kubenswrapper[5002]: I0930 13:18:43.641156 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-conductor-0_b6734a59-b52d-4116-bd32-31431b949757/nova-cell1-conductor-conductor/0.log" Sep 30 13:18:43 crc kubenswrapper[5002]: I0930 13:18:43.704141 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-novncproxy-0_18ab92e5-cde3-4728-9782-42025fa3d6b4/nova-cell1-novncproxy-novncproxy/0.log" Sep 30 13:18:44 crc kubenswrapper[5002]: I0930 13:18:44.023993 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-edpm-deployment-openstack-edpm-ipam-txpxh_d81299d2-2b37-4c3f-b313-d02d7b33045c/nova-edpm-deployment-openstack-edpm-ipam/0.log" Sep 30 13:18:44 crc kubenswrapper[5002]: I0930 13:18:44.258656 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_cc36d9f0-c2f5-463f-8f7b-3824c9bd9629/nova-metadata-log/0.log" Sep 30 13:18:44 crc kubenswrapper[5002]: I0930 13:18:44.683008 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-scheduler-0_dd4640b7-43e4-4029-88fa-bb9c4a293794/nova-scheduler-scheduler/0.log" Sep 30 13:18:44 crc kubenswrapper[5002]: I0930 13:18:44.736479 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_e109d20d-1925-4779-bbff-50bd39214d34/mysql-bootstrap/0.log" Sep 30 13:18:44 crc kubenswrapper[5002]: I0930 13:18:44.942703 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_e109d20d-1925-4779-bbff-50bd39214d34/mysql-bootstrap/0.log" Sep 30 13:18:44 crc kubenswrapper[5002]: I0930 13:18:44.990742 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_e109d20d-1925-4779-bbff-50bd39214d34/galera/0.log" Sep 30 13:18:45 crc kubenswrapper[5002]: I0930 13:18:45.252215 5002 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_openstack-galera-0_9d5d9337-f90f-4576-94dc-805d4e653801/mysql-bootstrap/0.log" Sep 30 13:18:45 crc kubenswrapper[5002]: I0930 13:18:45.452062 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_9d5d9337-f90f-4576-94dc-805d4e653801/mysql-bootstrap/0.log" Sep 30 13:18:45 crc kubenswrapper[5002]: I0930 13:18:45.504720 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_9d5d9337-f90f-4576-94dc-805d4e653801/galera/0.log" Sep 30 13:18:45 crc kubenswrapper[5002]: I0930 13:18:45.631874 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_cc36d9f0-c2f5-463f-8f7b-3824c9bd9629/nova-metadata-metadata/0.log" Sep 30 13:18:45 crc kubenswrapper[5002]: I0930 13:18:45.673704 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstackclient_369a872a-8dd2-409e-9938-2a01cd707dc8/openstackclient/0.log" Sep 30 13:18:45 crc kubenswrapper[5002]: I0930 13:18:45.878620 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-metrics-qztz9_1a0f9c34-465f-4f75-af75-71b2e2d3722a/openstack-network-exporter/0.log" Sep 30 13:18:46 crc kubenswrapper[5002]: I0930 13:18:46.132083 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-5m55z_9d2f8d72-e223-4f96-917e-7b47baba18d8/ovsdb-server-init/0.log" Sep 30 13:18:46 crc kubenswrapper[5002]: I0930 13:18:46.312688 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-5m55z_9d2f8d72-e223-4f96-917e-7b47baba18d8/ovsdb-server-init/0.log" Sep 30 13:18:46 crc kubenswrapper[5002]: I0930 13:18:46.337619 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-5m55z_9d2f8d72-e223-4f96-917e-7b47baba18d8/ovsdb-server/0.log" Sep 30 13:18:46 crc kubenswrapper[5002]: I0930 13:18:46.389125 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-5m55z_9d2f8d72-e223-4f96-917e-7b47baba18d8/ovs-vswitchd/0.log" Sep 30 13:18:46 crc kubenswrapper[5002]: I0930 13:18:46.573150 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-qq5zn_6b699340-4bbd-4df4-951b-9404b0545d24/ovn-controller/0.log" Sep 30 13:18:46 crc kubenswrapper[5002]: I0930 13:18:46.779307 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-edpm-deployment-openstack-edpm-ipam-4rc8x_71bf5034-6600-47c8-ad11-2855276e1356/ovn-edpm-deployment-openstack-edpm-ipam/0.log" Sep 30 13:18:46 crc kubenswrapper[5002]: I0930 13:18:46.873204 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_39ee2def-85c4-4070-9392-1f4d9fc2139c/openstack-network-exporter/0.log" Sep 30 13:18:47 crc kubenswrapper[5002]: I0930 13:18:47.001596 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_39ee2def-85c4-4070-9392-1f4d9fc2139c/ovn-northd/0.log" Sep 30 13:18:47 crc kubenswrapper[5002]: I0930 13:18:47.121727 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_1e148ee1-66bc-4300-a27a-a8c4ce515d51/openstack-network-exporter/0.log" Sep 30 13:18:47 crc kubenswrapper[5002]: I0930 13:18:47.233464 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_1e148ee1-66bc-4300-a27a-a8c4ce515d51/ovsdbserver-nb/0.log" Sep 30 13:18:47 crc kubenswrapper[5002]: I0930 13:18:47.330844 5002 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_ovsdbserver-sb-0_7304f60c-c750-409f-bca4-4fd12c239891/openstack-network-exporter/0.log" Sep 30 13:18:47 crc kubenswrapper[5002]: I0930 13:18:47.446614 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_7304f60c-c750-409f-bca4-4fd12c239891/ovsdbserver-sb/0.log" Sep 30 13:18:47 crc kubenswrapper[5002]: I0930 13:18:47.871558 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-6f8969955b-64x4f_c0d09801-8aec-455d-8e90-cad2e5f5a04e/placement-api/0.log" Sep 30 13:18:47 crc kubenswrapper[5002]: I0930 13:18:47.943186 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-6f8969955b-64x4f_c0d09801-8aec-455d-8e90-cad2e5f5a04e/placement-log/0.log" Sep 30 13:18:48 crc kubenswrapper[5002]: I0930 13:18:48.096821 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_4069e130-8a6c-4bf6-9885-b8e35857e519/setup-container/0.log" Sep 30 13:18:48 crc kubenswrapper[5002]: I0930 13:18:48.340637 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_4069e130-8a6c-4bf6-9885-b8e35857e519/rabbitmq/0.log" Sep 30 13:18:48 crc kubenswrapper[5002]: I0930 13:18:48.360951 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_4069e130-8a6c-4bf6-9885-b8e35857e519/setup-container/0.log" Sep 30 13:18:48 crc kubenswrapper[5002]: I0930 13:18:48.579832 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_9f3cd025-3ba6-453b-9224-ee63cf57890c/setup-container/0.log" Sep 30 13:18:48 crc kubenswrapper[5002]: I0930 13:18:48.788728 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_9f3cd025-3ba6-453b-9224-ee63cf57890c/setup-container/0.log" Sep 30 13:18:48 crc kubenswrapper[5002]: I0930 13:18:48.815911 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_9f3cd025-3ba6-453b-9224-ee63cf57890c/rabbitmq/0.log" Sep 30 13:18:49 crc kubenswrapper[5002]: I0930 13:18:49.029496 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_reboot-os-edpm-deployment-openstack-edpm-ipam-p8db7_d6667a58-0566-4c47-8516-b46bed2a0f65/reboot-os-edpm-deployment-openstack-edpm-ipam/0.log" Sep 30 13:18:49 crc kubenswrapper[5002]: I0930 13:18:49.134968 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_redhat-edpm-deployment-openstack-edpm-ipam-mqcxz_ad03eb09-92a2-4d00-9290-8b142d71fea6/redhat-edpm-deployment-openstack-edpm-ipam/0.log" Sep 30 13:18:49 crc kubenswrapper[5002]: I0930 13:18:49.299265 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_repo-setup-edpm-deployment-openstack-edpm-ipam-vswq9_67886def-b13b-463d-a4f6-3a0d13fa9580/repo-setup-edpm-deployment-openstack-edpm-ipam/0.log" Sep 30 13:18:49 crc kubenswrapper[5002]: I0930 13:18:49.574279 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_run-os-edpm-deployment-openstack-edpm-ipam-f2n6c_0d552fc4-7e51-426b-99ff-9ed8753f4178/run-os-edpm-deployment-openstack-edpm-ipam/0.log" Sep 30 13:18:49 crc kubenswrapper[5002]: I0930 13:18:49.605000 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ssh-known-hosts-edpm-deployment-w2tmz_13e7455b-ca1c-475f-95af-c9813c0876f7/ssh-known-hosts-edpm-deployment/0.log" Sep 30 13:18:49 crc kubenswrapper[5002]: I0930 13:18:49.859010 5002 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_swift-proxy-85654c5dc5-xmznd_c487a894-05f0-4ed3-9b0a-fc5bfbae3f74/proxy-server/0.log" Sep 30 13:18:49 crc kubenswrapper[5002]: I0930 13:18:49.952832 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-85654c5dc5-xmznd_c487a894-05f0-4ed3-9b0a-fc5bfbae3f74/proxy-httpd/0.log" Sep 30 13:18:50 crc kubenswrapper[5002]: I0930 13:18:50.093518 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-ring-rebalance-hrs89_8a1d9b02-1faf-4a01-82a1-d71e4c154f57/swift-ring-rebalance/0.log" Sep 30 13:18:50 crc kubenswrapper[5002]: I0930 13:18:50.242852 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_b7a32bf0-614c-479f-986e-3f954c27ad1f/account-auditor/0.log" Sep 30 13:18:50 crc kubenswrapper[5002]: I0930 13:18:50.328368 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_b7a32bf0-614c-479f-986e-3f954c27ad1f/account-reaper/0.log" Sep 30 13:18:50 crc kubenswrapper[5002]: I0930 13:18:50.424313 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_b7a32bf0-614c-479f-986e-3f954c27ad1f/account-replicator/0.log" Sep 30 13:18:50 crc kubenswrapper[5002]: I0930 13:18:50.498879 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_b7a32bf0-614c-479f-986e-3f954c27ad1f/account-server/0.log" Sep 30 13:18:50 crc kubenswrapper[5002]: I0930 13:18:50.548620 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_b7a32bf0-614c-479f-986e-3f954c27ad1f/container-auditor/0.log" Sep 30 13:18:50 crc kubenswrapper[5002]: I0930 13:18:50.679476 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_b7a32bf0-614c-479f-986e-3f954c27ad1f/container-replicator/0.log" Sep 30 13:18:50 crc kubenswrapper[5002]: I0930 13:18:50.689933 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_b7a32bf0-614c-479f-986e-3f954c27ad1f/container-server/0.log" Sep 30 13:18:50 crc kubenswrapper[5002]: I0930 13:18:50.738679 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_b7a32bf0-614c-479f-986e-3f954c27ad1f/container-updater/0.log" Sep 30 13:18:50 crc kubenswrapper[5002]: I0930 13:18:50.910014 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_b7a32bf0-614c-479f-986e-3f954c27ad1f/object-auditor/0.log" Sep 30 13:18:50 crc kubenswrapper[5002]: I0930 13:18:50.912695 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_b7a32bf0-614c-479f-986e-3f954c27ad1f/object-expirer/0.log" Sep 30 13:18:50 crc kubenswrapper[5002]: I0930 13:18:50.946831 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_b7a32bf0-614c-479f-986e-3f954c27ad1f/object-replicator/0.log" Sep 30 13:18:51 crc kubenswrapper[5002]: I0930 13:18:51.135871 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_b7a32bf0-614c-479f-986e-3f954c27ad1f/rsync/0.log" Sep 30 13:18:51 crc kubenswrapper[5002]: I0930 13:18:51.139981 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_b7a32bf0-614c-479f-986e-3f954c27ad1f/object-server/0.log" Sep 30 13:18:51 crc kubenswrapper[5002]: I0930 13:18:51.140870 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_b7a32bf0-614c-479f-986e-3f954c27ad1f/object-updater/0.log" Sep 30 
13:18:51 crc kubenswrapper[5002]: I0930 13:18:51.332023 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_b7a32bf0-614c-479f-986e-3f954c27ad1f/swift-recon-cron/0.log" Sep 30 13:18:51 crc kubenswrapper[5002]: I0930 13:18:51.452349 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_telemetry-edpm-deployment-openstack-edpm-ipam-r748r_5614484d-58b4-42e2-94a5-dda83b89be64/telemetry-edpm-deployment-openstack-edpm-ipam/0.log" Sep 30 13:18:51 crc kubenswrapper[5002]: I0930 13:18:51.663160 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_tempest-tests-tempest_32bc4a2b-b531-4126-8920-ec50156dc863/tempest-tests-tempest-tests-runner/0.log" Sep 30 13:18:51 crc kubenswrapper[5002]: I0930 13:18:51.746994 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_test-operator-logs-pod-tempest-tempest-tests-tempest_50ed8d15-6cc0-4e0f-9116-35c93f45d6d4/test-operator-logs-container/0.log" Sep 30 13:18:51 crc kubenswrapper[5002]: I0930 13:18:51.991661 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_validate-network-edpm-deployment-openstack-edpm-ipam-64qkx_a049d73d-c168-40c1-a943-9df4f221879a/validate-network-edpm-deployment-openstack-edpm-ipam/0.log" Sep 30 13:19:01 crc kubenswrapper[5002]: I0930 13:19:01.293280 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_memcached-0_7864c645-ac32-48bb-a292-0ff4ec2a5955/memcached/0.log" Sep 30 13:19:02 crc kubenswrapper[5002]: I0930 13:19:02.098994 5002 patch_prober.go:28] interesting pod/machine-config-daemon-ncbb5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 13:19:02 crc kubenswrapper[5002]: I0930 13:19:02.099431 5002 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" podUID="341a55c6-78d3-4fa2-8f47-b56fd41fa1c1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 13:19:32 crc kubenswrapper[5002]: I0930 13:19:32.098274 5002 patch_prober.go:28] interesting pod/machine-config-daemon-ncbb5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 13:19:32 crc kubenswrapper[5002]: I0930 13:19:32.098834 5002 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" podUID="341a55c6-78d3-4fa2-8f47-b56fd41fa1c1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 13:19:32 crc kubenswrapper[5002]: I0930 13:19:32.098895 5002 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" Sep 30 13:19:32 crc kubenswrapper[5002]: I0930 13:19:32.099726 5002 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"99dfdbc96d28883eef6b198c958bc370158e50fab55ddd68077640f2dc60329b"} pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" containerMessage="Container 
machine-config-daemon failed liveness probe, will be restarted" Sep 30 13:19:32 crc kubenswrapper[5002]: I0930 13:19:32.099795 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" podUID="341a55c6-78d3-4fa2-8f47-b56fd41fa1c1" containerName="machine-config-daemon" containerID="cri-o://99dfdbc96d28883eef6b198c958bc370158e50fab55ddd68077640f2dc60329b" gracePeriod=600 Sep 30 13:19:32 crc kubenswrapper[5002]: E0930 13:19:32.222555 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-ncbb5_openshift-machine-config-operator(341a55c6-78d3-4fa2-8f47-b56fd41fa1c1)\"" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" podUID="341a55c6-78d3-4fa2-8f47-b56fd41fa1c1" Sep 30 13:19:32 crc kubenswrapper[5002]: I0930 13:19:32.437536 5002 generic.go:334] "Generic (PLEG): container finished" podID="341a55c6-78d3-4fa2-8f47-b56fd41fa1c1" containerID="99dfdbc96d28883eef6b198c958bc370158e50fab55ddd68077640f2dc60329b" exitCode=0 Sep 30 13:19:32 crc kubenswrapper[5002]: I0930 13:19:32.437580 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" event={"ID":"341a55c6-78d3-4fa2-8f47-b56fd41fa1c1","Type":"ContainerDied","Data":"99dfdbc96d28883eef6b198c958bc370158e50fab55ddd68077640f2dc60329b"} Sep 30 13:19:32 crc kubenswrapper[5002]: I0930 13:19:32.437613 5002 scope.go:117] "RemoveContainer" containerID="563c0f60a2e42abd33256343ccd18a018715598f4986d0dbab88b973fb61a222" Sep 30 13:19:32 crc kubenswrapper[5002]: I0930 13:19:32.438714 5002 scope.go:117] "RemoveContainer" containerID="99dfdbc96d28883eef6b198c958bc370158e50fab55ddd68077640f2dc60329b" Sep 30 13:19:32 crc kubenswrapper[5002]: E0930 13:19:32.439545 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-ncbb5_openshift-machine-config-operator(341a55c6-78d3-4fa2-8f47-b56fd41fa1c1)\"" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" podUID="341a55c6-78d3-4fa2-8f47-b56fd41fa1c1" Sep 30 13:19:45 crc kubenswrapper[5002]: I0930 13:19:45.676730 5002 scope.go:117] "RemoveContainer" containerID="99dfdbc96d28883eef6b198c958bc370158e50fab55ddd68077640f2dc60329b" Sep 30 13:19:45 crc kubenswrapper[5002]: E0930 13:19:45.677356 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-ncbb5_openshift-machine-config-operator(341a55c6-78d3-4fa2-8f47-b56fd41fa1c1)\"" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" podUID="341a55c6-78d3-4fa2-8f47-b56fd41fa1c1" Sep 30 13:20:00 crc kubenswrapper[5002]: I0930 13:20:00.676864 5002 scope.go:117] "RemoveContainer" containerID="99dfdbc96d28883eef6b198c958bc370158e50fab55ddd68077640f2dc60329b" Sep 30 13:20:00 crc kubenswrapper[5002]: E0930 13:20:00.677720 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-ncbb5_openshift-machine-config-operator(341a55c6-78d3-4fa2-8f47-b56fd41fa1c1)\"" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" podUID="341a55c6-78d3-4fa2-8f47-b56fd41fa1c1" Sep 30 13:20:00 crc kubenswrapper[5002]: I0930 13:20:00.774645 5002 generic.go:334] "Generic (PLEG): container finished" podID="d3362723-ac17-40a0-b304-77476e1a80b8" containerID="b27c37db4f5c063bfaadcef119c5af3b321cd4287f504e684cdef59724eb7c97" exitCode=0 Sep 30 13:20:00 crc kubenswrapper[5002]: I0930 13:20:00.774690 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-4ppnm/crc-debug-5v4xh" event={"ID":"d3362723-ac17-40a0-b304-77476e1a80b8","Type":"ContainerDied","Data":"b27c37db4f5c063bfaadcef119c5af3b321cd4287f504e684cdef59724eb7c97"} Sep 30 13:20:01 crc kubenswrapper[5002]: I0930 13:20:01.916249 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-4ppnm/crc-debug-5v4xh" Sep 30 13:20:01 crc kubenswrapper[5002]: I0930 13:20:01.962058 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-4ppnm/crc-debug-5v4xh"] Sep 30 13:20:01 crc kubenswrapper[5002]: I0930 13:20:01.974043 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-4ppnm/crc-debug-5v4xh"] Sep 30 13:20:02 crc kubenswrapper[5002]: I0930 13:20:02.062986 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/d3362723-ac17-40a0-b304-77476e1a80b8-host\") pod \"d3362723-ac17-40a0-b304-77476e1a80b8\" (UID: \"d3362723-ac17-40a0-b304-77476e1a80b8\") " Sep 30 13:20:02 crc kubenswrapper[5002]: I0930 13:20:02.063103 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/d3362723-ac17-40a0-b304-77476e1a80b8-host" (OuterVolumeSpecName: "host") pod "d3362723-ac17-40a0-b304-77476e1a80b8" (UID: "d3362723-ac17-40a0-b304-77476e1a80b8"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 13:20:02 crc kubenswrapper[5002]: I0930 13:20:02.063180 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-m7lb9\" (UniqueName: \"kubernetes.io/projected/d3362723-ac17-40a0-b304-77476e1a80b8-kube-api-access-m7lb9\") pod \"d3362723-ac17-40a0-b304-77476e1a80b8\" (UID: \"d3362723-ac17-40a0-b304-77476e1a80b8\") " Sep 30 13:20:02 crc kubenswrapper[5002]: I0930 13:20:02.063864 5002 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/d3362723-ac17-40a0-b304-77476e1a80b8-host\") on node \"crc\" DevicePath \"\"" Sep 30 13:20:02 crc kubenswrapper[5002]: I0930 13:20:02.073332 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d3362723-ac17-40a0-b304-77476e1a80b8-kube-api-access-m7lb9" (OuterVolumeSpecName: "kube-api-access-m7lb9") pod "d3362723-ac17-40a0-b304-77476e1a80b8" (UID: "d3362723-ac17-40a0-b304-77476e1a80b8"). InnerVolumeSpecName "kube-api-access-m7lb9". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 13:20:02 crc kubenswrapper[5002]: I0930 13:20:02.166275 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m7lb9\" (UniqueName: \"kubernetes.io/projected/d3362723-ac17-40a0-b304-77476e1a80b8-kube-api-access-m7lb9\") on node \"crc\" DevicePath \"\"" Sep 30 13:20:02 crc kubenswrapper[5002]: I0930 13:20:02.689140 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d3362723-ac17-40a0-b304-77476e1a80b8" path="/var/lib/kubelet/pods/d3362723-ac17-40a0-b304-77476e1a80b8/volumes" Sep 30 13:20:02 crc kubenswrapper[5002]: I0930 13:20:02.801143 5002 scope.go:117] "RemoveContainer" containerID="b27c37db4f5c063bfaadcef119c5af3b321cd4287f504e684cdef59724eb7c97" Sep 30 13:20:02 crc kubenswrapper[5002]: I0930 13:20:02.801197 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-4ppnm/crc-debug-5v4xh" Sep 30 13:20:03 crc kubenswrapper[5002]: I0930 13:20:03.160009 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-4ppnm/crc-debug-2jrdm"] Sep 30 13:20:03 crc kubenswrapper[5002]: E0930 13:20:03.160379 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d3362723-ac17-40a0-b304-77476e1a80b8" containerName="container-00" Sep 30 13:20:03 crc kubenswrapper[5002]: I0930 13:20:03.160390 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="d3362723-ac17-40a0-b304-77476e1a80b8" containerName="container-00" Sep 30 13:20:03 crc kubenswrapper[5002]: I0930 13:20:03.160581 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="d3362723-ac17-40a0-b304-77476e1a80b8" containerName="container-00" Sep 30 13:20:03 crc kubenswrapper[5002]: I0930 13:20:03.161215 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-4ppnm/crc-debug-2jrdm" Sep 30 13:20:03 crc kubenswrapper[5002]: I0930 13:20:03.289465 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/b6c6d724-0c7f-4799-a777-69bbd4edd8a7-host\") pod \"crc-debug-2jrdm\" (UID: \"b6c6d724-0c7f-4799-a777-69bbd4edd8a7\") " pod="openshift-must-gather-4ppnm/crc-debug-2jrdm" Sep 30 13:20:03 crc kubenswrapper[5002]: I0930 13:20:03.289770 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j542b\" (UniqueName: \"kubernetes.io/projected/b6c6d724-0c7f-4799-a777-69bbd4edd8a7-kube-api-access-j542b\") pod \"crc-debug-2jrdm\" (UID: \"b6c6d724-0c7f-4799-a777-69bbd4edd8a7\") " pod="openshift-must-gather-4ppnm/crc-debug-2jrdm" Sep 30 13:20:03 crc kubenswrapper[5002]: I0930 13:20:03.392422 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/b6c6d724-0c7f-4799-a777-69bbd4edd8a7-host\") pod \"crc-debug-2jrdm\" (UID: \"b6c6d724-0c7f-4799-a777-69bbd4edd8a7\") " pod="openshift-must-gather-4ppnm/crc-debug-2jrdm" Sep 30 13:20:03 crc kubenswrapper[5002]: I0930 13:20:03.392531 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j542b\" (UniqueName: \"kubernetes.io/projected/b6c6d724-0c7f-4799-a777-69bbd4edd8a7-kube-api-access-j542b\") pod \"crc-debug-2jrdm\" (UID: \"b6c6d724-0c7f-4799-a777-69bbd4edd8a7\") " pod="openshift-must-gather-4ppnm/crc-debug-2jrdm" Sep 30 13:20:03 crc kubenswrapper[5002]: I0930 13:20:03.392551 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/b6c6d724-0c7f-4799-a777-69bbd4edd8a7-host\") pod \"crc-debug-2jrdm\" (UID: \"b6c6d724-0c7f-4799-a777-69bbd4edd8a7\") " pod="openshift-must-gather-4ppnm/crc-debug-2jrdm" Sep 30 13:20:03 crc kubenswrapper[5002]: I0930 13:20:03.412789 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j542b\" (UniqueName: \"kubernetes.io/projected/b6c6d724-0c7f-4799-a777-69bbd4edd8a7-kube-api-access-j542b\") pod \"crc-debug-2jrdm\" (UID: \"b6c6d724-0c7f-4799-a777-69bbd4edd8a7\") " pod="openshift-must-gather-4ppnm/crc-debug-2jrdm" Sep 30 13:20:03 crc kubenswrapper[5002]: I0930 13:20:03.482013 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-4ppnm/crc-debug-2jrdm" Sep 30 13:20:03 crc kubenswrapper[5002]: W0930 13:20:03.527941 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb6c6d724_0c7f_4799_a777_69bbd4edd8a7.slice/crio-de09b9be9a26205be3411721e983881271988e4e0f23f3870606d5d9d89f79aa WatchSource:0}: Error finding container de09b9be9a26205be3411721e983881271988e4e0f23f3870606d5d9d89f79aa: Status 404 returned error can't find the container with id de09b9be9a26205be3411721e983881271988e4e0f23f3870606d5d9d89f79aa Sep 30 13:20:03 crc kubenswrapper[5002]: I0930 13:20:03.811118 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-4ppnm/crc-debug-2jrdm" event={"ID":"b6c6d724-0c7f-4799-a777-69bbd4edd8a7","Type":"ContainerStarted","Data":"01ed2fe0f9f4f58f0ba9b636cdf468b863c117e21bd51bdeade9cba762ec0f3e"} Sep 30 13:20:03 crc kubenswrapper[5002]: I0930 13:20:03.811500 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-4ppnm/crc-debug-2jrdm" event={"ID":"b6c6d724-0c7f-4799-a777-69bbd4edd8a7","Type":"ContainerStarted","Data":"de09b9be9a26205be3411721e983881271988e4e0f23f3870606d5d9d89f79aa"} Sep 30 13:20:03 crc kubenswrapper[5002]: I0930 13:20:03.829940 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-4ppnm/crc-debug-2jrdm" podStartSLOduration=0.829904076 podStartE2EDuration="829.904076ms" podCreationTimestamp="2025-09-30 13:20:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 13:20:03.823723211 +0000 UTC m=+3578.073405367" watchObservedRunningTime="2025-09-30 13:20:03.829904076 +0000 UTC m=+3578.079586262" Sep 30 13:20:04 crc kubenswrapper[5002]: I0930 13:20:04.820177 5002 generic.go:334] "Generic (PLEG): container finished" podID="b6c6d724-0c7f-4799-a777-69bbd4edd8a7" containerID="01ed2fe0f9f4f58f0ba9b636cdf468b863c117e21bd51bdeade9cba762ec0f3e" exitCode=0 Sep 30 13:20:04 crc kubenswrapper[5002]: I0930 13:20:04.820275 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-4ppnm/crc-debug-2jrdm" event={"ID":"b6c6d724-0c7f-4799-a777-69bbd4edd8a7","Type":"ContainerDied","Data":"01ed2fe0f9f4f58f0ba9b636cdf468b863c117e21bd51bdeade9cba762ec0f3e"} Sep 30 13:20:05 crc kubenswrapper[5002]: I0930 13:20:05.929379 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-4ppnm/crc-debug-2jrdm" Sep 30 13:20:06 crc kubenswrapper[5002]: I0930 13:20:06.028782 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j542b\" (UniqueName: \"kubernetes.io/projected/b6c6d724-0c7f-4799-a777-69bbd4edd8a7-kube-api-access-j542b\") pod \"b6c6d724-0c7f-4799-a777-69bbd4edd8a7\" (UID: \"b6c6d724-0c7f-4799-a777-69bbd4edd8a7\") " Sep 30 13:20:06 crc kubenswrapper[5002]: I0930 13:20:06.028871 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/b6c6d724-0c7f-4799-a777-69bbd4edd8a7-host\") pod \"b6c6d724-0c7f-4799-a777-69bbd4edd8a7\" (UID: \"b6c6d724-0c7f-4799-a777-69bbd4edd8a7\") " Sep 30 13:20:06 crc kubenswrapper[5002]: I0930 13:20:06.029023 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/b6c6d724-0c7f-4799-a777-69bbd4edd8a7-host" (OuterVolumeSpecName: "host") pod "b6c6d724-0c7f-4799-a777-69bbd4edd8a7" (UID: "b6c6d724-0c7f-4799-a777-69bbd4edd8a7"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 13:20:06 crc kubenswrapper[5002]: I0930 13:20:06.029357 5002 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/b6c6d724-0c7f-4799-a777-69bbd4edd8a7-host\") on node \"crc\" DevicePath \"\"" Sep 30 13:20:06 crc kubenswrapper[5002]: I0930 13:20:06.035275 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6c6d724-0c7f-4799-a777-69bbd4edd8a7-kube-api-access-j542b" (OuterVolumeSpecName: "kube-api-access-j542b") pod "b6c6d724-0c7f-4799-a777-69bbd4edd8a7" (UID: "b6c6d724-0c7f-4799-a777-69bbd4edd8a7"). InnerVolumeSpecName "kube-api-access-j542b". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 13:20:06 crc kubenswrapper[5002]: I0930 13:20:06.133237 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j542b\" (UniqueName: \"kubernetes.io/projected/b6c6d724-0c7f-4799-a777-69bbd4edd8a7-kube-api-access-j542b\") on node \"crc\" DevicePath \"\"" Sep 30 13:20:06 crc kubenswrapper[5002]: I0930 13:20:06.838190 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-4ppnm/crc-debug-2jrdm" event={"ID":"b6c6d724-0c7f-4799-a777-69bbd4edd8a7","Type":"ContainerDied","Data":"de09b9be9a26205be3411721e983881271988e4e0f23f3870606d5d9d89f79aa"} Sep 30 13:20:06 crc kubenswrapper[5002]: I0930 13:20:06.838572 5002 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="de09b9be9a26205be3411721e983881271988e4e0f23f3870606d5d9d89f79aa" Sep 30 13:20:06 crc kubenswrapper[5002]: I0930 13:20:06.838246 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-4ppnm/crc-debug-2jrdm" Sep 30 13:20:10 crc kubenswrapper[5002]: I0930 13:20:10.655617 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-4ppnm/crc-debug-2jrdm"] Sep 30 13:20:10 crc kubenswrapper[5002]: I0930 13:20:10.664318 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-4ppnm/crc-debug-2jrdm"] Sep 30 13:20:10 crc kubenswrapper[5002]: I0930 13:20:10.688185 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6c6d724-0c7f-4799-a777-69bbd4edd8a7" path="/var/lib/kubelet/pods/b6c6d724-0c7f-4799-a777-69bbd4edd8a7/volumes" Sep 30 13:20:11 crc kubenswrapper[5002]: I0930 13:20:11.816246 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-4ppnm/crc-debug-rv75c"] Sep 30 13:20:11 crc kubenswrapper[5002]: E0930 13:20:11.817038 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b6c6d724-0c7f-4799-a777-69bbd4edd8a7" containerName="container-00" Sep 30 13:20:11 crc kubenswrapper[5002]: I0930 13:20:11.817056 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="b6c6d724-0c7f-4799-a777-69bbd4edd8a7" containerName="container-00" Sep 30 13:20:11 crc kubenswrapper[5002]: I0930 13:20:11.817296 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="b6c6d724-0c7f-4799-a777-69bbd4edd8a7" containerName="container-00" Sep 30 13:20:11 crc kubenswrapper[5002]: I0930 13:20:11.818044 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-4ppnm/crc-debug-rv75c" Sep 30 13:20:11 crc kubenswrapper[5002]: I0930 13:20:11.921276 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/ddaa44db-15a4-4286-bc58-6e007d69fecf-host\") pod \"crc-debug-rv75c\" (UID: \"ddaa44db-15a4-4286-bc58-6e007d69fecf\") " pod="openshift-must-gather-4ppnm/crc-debug-rv75c" Sep 30 13:20:11 crc kubenswrapper[5002]: I0930 13:20:11.921524 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7v9lz\" (UniqueName: \"kubernetes.io/projected/ddaa44db-15a4-4286-bc58-6e007d69fecf-kube-api-access-7v9lz\") pod \"crc-debug-rv75c\" (UID: \"ddaa44db-15a4-4286-bc58-6e007d69fecf\") " pod="openshift-must-gather-4ppnm/crc-debug-rv75c" Sep 30 13:20:12 crc kubenswrapper[5002]: I0930 13:20:12.022708 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7v9lz\" (UniqueName: \"kubernetes.io/projected/ddaa44db-15a4-4286-bc58-6e007d69fecf-kube-api-access-7v9lz\") pod \"crc-debug-rv75c\" (UID: \"ddaa44db-15a4-4286-bc58-6e007d69fecf\") " pod="openshift-must-gather-4ppnm/crc-debug-rv75c" Sep 30 13:20:12 crc kubenswrapper[5002]: I0930 13:20:12.022839 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/ddaa44db-15a4-4286-bc58-6e007d69fecf-host\") pod \"crc-debug-rv75c\" (UID: \"ddaa44db-15a4-4286-bc58-6e007d69fecf\") " pod="openshift-must-gather-4ppnm/crc-debug-rv75c" Sep 30 13:20:12 crc kubenswrapper[5002]: I0930 13:20:12.022973 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/ddaa44db-15a4-4286-bc58-6e007d69fecf-host\") pod \"crc-debug-rv75c\" (UID: \"ddaa44db-15a4-4286-bc58-6e007d69fecf\") " pod="openshift-must-gather-4ppnm/crc-debug-rv75c" Sep 30 13:20:12 crc kubenswrapper[5002]: 
I0930 13:20:12.046622 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7v9lz\" (UniqueName: \"kubernetes.io/projected/ddaa44db-15a4-4286-bc58-6e007d69fecf-kube-api-access-7v9lz\") pod \"crc-debug-rv75c\" (UID: \"ddaa44db-15a4-4286-bc58-6e007d69fecf\") " pod="openshift-must-gather-4ppnm/crc-debug-rv75c"
Sep 30 13:20:12 crc kubenswrapper[5002]: I0930 13:20:12.140335 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-4ppnm/crc-debug-rv75c"
Sep 30 13:20:12 crc kubenswrapper[5002]: W0930 13:20:12.181348 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podddaa44db_15a4_4286_bc58_6e007d69fecf.slice/crio-78b449a8b1443ad60bc2b8d149243614eab6f07f71726a7e1ca1ade1541bbb0b WatchSource:0}: Error finding container 78b449a8b1443ad60bc2b8d149243614eab6f07f71726a7e1ca1ade1541bbb0b: Status 404 returned error can't find the container with id 78b449a8b1443ad60bc2b8d149243614eab6f07f71726a7e1ca1ade1541bbb0b
Sep 30 13:20:12 crc kubenswrapper[5002]: I0930 13:20:12.890515 5002 generic.go:334] "Generic (PLEG): container finished" podID="ddaa44db-15a4-4286-bc58-6e007d69fecf" containerID="9003e4e70b8211cadb28395402fd8317b7e249f606898e99c9a1ef8263b8898b" exitCode=0
Sep 30 13:20:12 crc kubenswrapper[5002]: I0930 13:20:12.890618 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-4ppnm/crc-debug-rv75c" event={"ID":"ddaa44db-15a4-4286-bc58-6e007d69fecf","Type":"ContainerDied","Data":"9003e4e70b8211cadb28395402fd8317b7e249f606898e99c9a1ef8263b8898b"}
Sep 30 13:20:12 crc kubenswrapper[5002]: I0930 13:20:12.890958 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-4ppnm/crc-debug-rv75c" event={"ID":"ddaa44db-15a4-4286-bc58-6e007d69fecf","Type":"ContainerStarted","Data":"78b449a8b1443ad60bc2b8d149243614eab6f07f71726a7e1ca1ade1541bbb0b"}
Sep 30 13:20:12 crc kubenswrapper[5002]: I0930 13:20:12.938348 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-4ppnm/crc-debug-rv75c"]
Sep 30 13:20:12 crc kubenswrapper[5002]: I0930 13:20:12.949111 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-4ppnm/crc-debug-rv75c"]
Sep 30 13:20:13 crc kubenswrapper[5002]: I0930 13:20:13.676701 5002 scope.go:117] "RemoveContainer" containerID="99dfdbc96d28883eef6b198c958bc370158e50fab55ddd68077640f2dc60329b"
Sep 30 13:20:13 crc kubenswrapper[5002]: E0930 13:20:13.677349 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-ncbb5_openshift-machine-config-operator(341a55c6-78d3-4fa2-8f47-b56fd41fa1c1)\"" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" podUID="341a55c6-78d3-4fa2-8f47-b56fd41fa1c1"
Sep 30 13:20:14 crc kubenswrapper[5002]: I0930 13:20:14.006815 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-4ppnm/crc-debug-rv75c"
Sep 30 13:20:14 crc kubenswrapper[5002]: I0930 13:20:14.063567 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7v9lz\" (UniqueName: \"kubernetes.io/projected/ddaa44db-15a4-4286-bc58-6e007d69fecf-kube-api-access-7v9lz\") pod \"ddaa44db-15a4-4286-bc58-6e007d69fecf\" (UID: \"ddaa44db-15a4-4286-bc58-6e007d69fecf\") "
Sep 30 13:20:14 crc kubenswrapper[5002]: I0930 13:20:14.063824 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/ddaa44db-15a4-4286-bc58-6e007d69fecf-host\") pod \"ddaa44db-15a4-4286-bc58-6e007d69fecf\" (UID: \"ddaa44db-15a4-4286-bc58-6e007d69fecf\") "
Sep 30 13:20:14 crc kubenswrapper[5002]: I0930 13:20:14.064226 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/ddaa44db-15a4-4286-bc58-6e007d69fecf-host" (OuterVolumeSpecName: "host") pod "ddaa44db-15a4-4286-bc58-6e007d69fecf" (UID: "ddaa44db-15a4-4286-bc58-6e007d69fecf"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Sep 30 13:20:14 crc kubenswrapper[5002]: I0930 13:20:14.077782 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ddaa44db-15a4-4286-bc58-6e007d69fecf-kube-api-access-7v9lz" (OuterVolumeSpecName: "kube-api-access-7v9lz") pod "ddaa44db-15a4-4286-bc58-6e007d69fecf" (UID: "ddaa44db-15a4-4286-bc58-6e007d69fecf"). InnerVolumeSpecName "kube-api-access-7v9lz". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 13:20:14 crc kubenswrapper[5002]: I0930 13:20:14.165561 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7v9lz\" (UniqueName: \"kubernetes.io/projected/ddaa44db-15a4-4286-bc58-6e007d69fecf-kube-api-access-7v9lz\") on node \"crc\" DevicePath \"\""
Sep 30 13:20:14 crc kubenswrapper[5002]: I0930 13:20:14.165594 5002 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/ddaa44db-15a4-4286-bc58-6e007d69fecf-host\") on node \"crc\" DevicePath \"\""
Sep 30 13:20:14 crc kubenswrapper[5002]: I0930 13:20:14.402870 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-6ff8b75857-mf4jp_636ed6c4-281d-4ea2-be99-a04e07b08170/kube-rbac-proxy/0.log"
Sep 30 13:20:14 crc kubenswrapper[5002]: I0930 13:20:14.482979 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-6ff8b75857-mf4jp_636ed6c4-281d-4ea2-be99-a04e07b08170/manager/0.log"
Sep 30 13:20:14 crc kubenswrapper[5002]: I0930 13:20:14.636600 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-644bddb6d8-ph95z_8eefd962-2b74-4b77-8bc9-338b8ccfd0cf/kube-rbac-proxy/0.log"
Sep 30 13:20:14 crc kubenswrapper[5002]: I0930 13:20:14.661024 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-644bddb6d8-ph95z_8eefd962-2b74-4b77-8bc9-338b8ccfd0cf/manager/0.log"
Sep 30 13:20:14 crc kubenswrapper[5002]: I0930 13:20:14.688077 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ddaa44db-15a4-4286-bc58-6e007d69fecf" path="/var/lib/kubelet/pods/ddaa44db-15a4-4286-bc58-6e007d69fecf/volumes"
Sep 30 13:20:14 crc kubenswrapper[5002]: I0930 13:20:14.810525 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-84f4f7b77b-rvv9g_9caa184d-b4ff-4419-8f8d-ede2b0b6845e/kube-rbac-proxy/0.log"
Sep 30 13:20:14 crc kubenswrapper[5002]: I0930 13:20:14.814016 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-84f4f7b77b-rvv9g_9caa184d-b4ff-4419-8f8d-ede2b0b6845e/manager/0.log"
Sep 30 13:20:14 crc kubenswrapper[5002]: I0930 13:20:14.873074 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_fcb715ece9f005e1a03c130eeea8b9b209953c0686aefe28df3e5ad2fftkcgf_bd5d2600-fe6f-407b-8110-97516c3117cb/util/0.log"
Sep 30 13:20:14 crc kubenswrapper[5002]: I0930 13:20:14.909133 5002 scope.go:117] "RemoveContainer" containerID="9003e4e70b8211cadb28395402fd8317b7e249f606898e99c9a1ef8263b8898b"
Sep 30 13:20:14 crc kubenswrapper[5002]: I0930 13:20:14.909187 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-4ppnm/crc-debug-rv75c"
Sep 30 13:20:15 crc kubenswrapper[5002]: I0930 13:20:15.044463 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_fcb715ece9f005e1a03c130eeea8b9b209953c0686aefe28df3e5ad2fftkcgf_bd5d2600-fe6f-407b-8110-97516c3117cb/util/0.log"
Sep 30 13:20:15 crc kubenswrapper[5002]: I0930 13:20:15.059987 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_fcb715ece9f005e1a03c130eeea8b9b209953c0686aefe28df3e5ad2fftkcgf_bd5d2600-fe6f-407b-8110-97516c3117cb/pull/0.log"
Sep 30 13:20:15 crc kubenswrapper[5002]: I0930 13:20:15.075876 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_fcb715ece9f005e1a03c130eeea8b9b209953c0686aefe28df3e5ad2fftkcgf_bd5d2600-fe6f-407b-8110-97516c3117cb/pull/0.log"
Sep 30 13:20:15 crc kubenswrapper[5002]: I0930 13:20:15.212812 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_fcb715ece9f005e1a03c130eeea8b9b209953c0686aefe28df3e5ad2fftkcgf_bd5d2600-fe6f-407b-8110-97516c3117cb/util/0.log"
Sep 30 13:20:15 crc kubenswrapper[5002]: I0930 13:20:15.229816 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_fcb715ece9f005e1a03c130eeea8b9b209953c0686aefe28df3e5ad2fftkcgf_bd5d2600-fe6f-407b-8110-97516c3117cb/extract/0.log"
Sep 30 13:20:15 crc kubenswrapper[5002]: I0930 13:20:15.262701 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_fcb715ece9f005e1a03c130eeea8b9b209953c0686aefe28df3e5ad2fftkcgf_bd5d2600-fe6f-407b-8110-97516c3117cb/pull/0.log"
Sep 30 13:20:15 crc kubenswrapper[5002]: I0930 13:20:15.390627 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-84958c4d49-chbd7_d44fe72c-afa7-4442-b308-0b111e16c7b8/kube-rbac-proxy/0.log"
Sep 30 13:20:15 crc kubenswrapper[5002]: I0930 13:20:15.460725 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-5d889d78cf-sjnlc_63ee8874-1cbb-4183-b16a-f2efd8a1e7d6/kube-rbac-proxy/0.log"
Sep 30 13:20:15 crc kubenswrapper[5002]: I0930 13:20:15.466506 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-84958c4d49-chbd7_d44fe72c-afa7-4442-b308-0b111e16c7b8/manager/0.log"
Sep 30 13:20:15 crc kubenswrapper[5002]: I0930 13:20:15.591531 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-5d889d78cf-sjnlc_63ee8874-1cbb-4183-b16a-f2efd8a1e7d6/manager/0.log"
Sep 30 13:20:15 crc kubenswrapper[5002]: I0930 13:20:15.631876 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-9f4696d94-42j7m_aa7bbe9e-668f-4ee3-976d-8e9ddbdbb092/kube-rbac-proxy/0.log"
Sep 30 13:20:15 crc kubenswrapper[5002]: I0930 13:20:15.685016 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-9f4696d94-42j7m_aa7bbe9e-668f-4ee3-976d-8e9ddbdbb092/manager/0.log"
Sep 30 13:20:15 crc kubenswrapper[5002]: I0930 13:20:15.813363 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-7d857cc749-qcq76_7c75d96a-d0a7-4f12-9799-4d01ee215248/kube-rbac-proxy/0.log"
Sep 30 13:20:15 crc kubenswrapper[5002]: I0930 13:20:15.962884 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-7d857cc749-qcq76_7c75d96a-d0a7-4f12-9799-4d01ee215248/manager/0.log"
Sep 30 13:20:15 crc kubenswrapper[5002]: I0930 13:20:15.984897 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-7975b88857-r7t4w_720e0316-9060-4bd3-804c-f98017a3fb84/kube-rbac-proxy/0.log"
Sep 30 13:20:16 crc kubenswrapper[5002]: I0930 13:20:16.003178 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-7975b88857-r7t4w_720e0316-9060-4bd3-804c-f98017a3fb84/manager/0.log"
Sep 30 13:20:16 crc kubenswrapper[5002]: I0930 13:20:16.123381 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-5bd55b4bff-m7m4h_9692c8b9-1e89-47e3-972c-1af7eb8a2ebe/kube-rbac-proxy/0.log"
Sep 30 13:20:16 crc kubenswrapper[5002]: I0930 13:20:16.236532 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-5bd55b4bff-m7m4h_9692c8b9-1e89-47e3-972c-1af7eb8a2ebe/manager/0.log"
Sep 30 13:20:16 crc kubenswrapper[5002]: I0930 13:20:16.309281 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-6d68dbc695-zzt9s_855868b4-991b-4f9f-b471-5b1244221192/kube-rbac-proxy/0.log"
Sep 30 13:20:16 crc kubenswrapper[5002]: I0930 13:20:16.342229 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-6d68dbc695-zzt9s_855868b4-991b-4f9f-b471-5b1244221192/manager/0.log"
Sep 30 13:20:16 crc kubenswrapper[5002]: I0930 13:20:16.421998 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-88c7-sxv7p_aa376f72-5b9a-4087-8ea6-a5cf80be315b/kube-rbac-proxy/0.log"
Sep 30 13:20:16 crc kubenswrapper[5002]: I0930 13:20:16.528412 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-88c7-sxv7p_aa376f72-5b9a-4087-8ea6-a5cf80be315b/manager/0.log"
Sep 30 13:20:16 crc kubenswrapper[5002]: I0930 13:20:16.607971 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-64d7b59854-hcr66_6413b1d0-7f0f-4bca-88e9-90a9d78bff9c/kube-rbac-proxy/0.log"
Sep 30 13:20:16 crc kubenswrapper[5002]: I0930 13:20:16.663979 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-64d7b59854-hcr66_6413b1d0-7f0f-4bca-88e9-90a9d78bff9c/manager/0.log"
Sep 30 13:20:16 crc kubenswrapper[5002]: I0930 13:20:16.726907 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-c7c776c96-gh66v_2d9e02f5-4644-423a-a783-8dbc51d68570/kube-rbac-proxy/0.log"
Sep 30 13:20:16 crc kubenswrapper[5002]: I0930 13:20:16.866902 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-c7c776c96-gh66v_2d9e02f5-4644-423a-a783-8dbc51d68570/manager/0.log"
Sep 30 13:20:16 crc kubenswrapper[5002]: I0930 13:20:16.928203 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-76fcc6dc7c-xwlmz_38eab40a-26bf-4c1b-8911-4d6672629e3e/kube-rbac-proxy/0.log"
Sep 30 13:20:16 crc kubenswrapper[5002]: I0930 13:20:16.946924 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-76fcc6dc7c-xwlmz_38eab40a-26bf-4c1b-8911-4d6672629e3e/manager/0.log"
Sep 30 13:20:17 crc kubenswrapper[5002]: I0930 13:20:17.079041 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-6d776955-c7j6g_5d79f7da-8bfd-4f26-bcf1-b4ad36a6b42f/kube-rbac-proxy/0.log"
Sep 30 13:20:17 crc kubenswrapper[5002]: I0930 13:20:17.112908 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-6d776955-c7j6g_5d79f7da-8bfd-4f26-bcf1-b4ad36a6b42f/manager/0.log"
Sep 30 13:20:17 crc kubenswrapper[5002]: I0930 13:20:17.278257 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-84697dfb4d-w86tm_32ebad0d-e677-4e33-b555-44db1541becc/kube-rbac-proxy/0.log"
Sep 30 13:20:17 crc kubenswrapper[5002]: I0930 13:20:17.364133 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-operator-b45798bf6-lpg2f_362e8e35-0b7c-4c4f-9db2-9c3d56a76d2f/kube-rbac-proxy/0.log"
Sep 30 13:20:17 crc kubenswrapper[5002]: I0930 13:20:17.630805 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-index-gc6rt_17eab1ee-1e5f-4092-80e0-77d8a4ca4016/registry-server/0.log"
Sep 30 13:20:17 crc kubenswrapper[5002]: I0930 13:20:17.720095 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-operator-b45798bf6-lpg2f_362e8e35-0b7c-4c4f-9db2-9c3d56a76d2f/operator/0.log"
Sep 30 13:20:17 crc kubenswrapper[5002]: I0930 13:20:17.844348 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-9976ff44c-kl4qt_2607efd2-d90d-4a1e-be6c-5f3c88da67e4/kube-rbac-proxy/0.log"
Sep 30 13:20:18 crc kubenswrapper[5002]: I0930 13:20:18.025219 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-9976ff44c-kl4qt_2607efd2-d90d-4a1e-be6c-5f3c88da67e4/manager/0.log"
Sep 30 13:20:18 crc kubenswrapper[5002]: I0930 13:20:18.072771 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-589c58c6c-57btn_0d7297eb-3633-4083-9d4b-3bf8487360ca/manager/0.log"
Sep 30 13:20:18 crc kubenswrapper[5002]: I0930 13:20:18.077560 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-589c58c6c-57btn_0d7297eb-3633-4083-9d4b-3bf8487360ca/kube-rbac-proxy/0.log"
Sep 30 13:20:18 crc kubenswrapper[5002]: I0930 13:20:18.251531 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_rabbitmq-cluster-operator-manager-79d8469568-h9dzh_b4044bb7-6e63-45d0-9640-7966bcd23aa9/operator/0.log"
Sep 30 13:20:18 crc kubenswrapper[5002]: I0930 13:20:18.288066 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-bc7dc7bd9-d9jtf_90cc681e-c24d-4b64-862d-3514308e77c6/kube-rbac-proxy/0.log"
Sep 30 13:20:18 crc kubenswrapper[5002]: I0930 13:20:18.429442 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-84697dfb4d-w86tm_32ebad0d-e677-4e33-b555-44db1541becc/manager/0.log"
Sep 30 13:20:18 crc kubenswrapper[5002]: I0930 13:20:18.475791 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-bc7dc7bd9-d9jtf_90cc681e-c24d-4b64-862d-3514308e77c6/manager/0.log"
Sep 30 13:20:18 crc kubenswrapper[5002]: I0930 13:20:18.519854 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-b8d54b5d7-7662l_78f98b71-27e2-411c-b610-8b4be1068d5a/kube-rbac-proxy/0.log"
Sep 30 13:20:18 crc kubenswrapper[5002]: I0930 13:20:18.559700 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-b8d54b5d7-7662l_78f98b71-27e2-411c-b610-8b4be1068d5a/manager/0.log"
Sep 30 13:20:18 crc kubenswrapper[5002]: I0930 13:20:18.671268 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-f66b554c6-z2dcj_0fa1c573-cbf9-43f1-8106-7cf73e93f1f3/manager/0.log"
Sep 30 13:20:18 crc kubenswrapper[5002]: I0930 13:20:18.707526 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-f66b554c6-z2dcj_0fa1c573-cbf9-43f1-8106-7cf73e93f1f3/kube-rbac-proxy/0.log"
Sep 30 13:20:18 crc kubenswrapper[5002]: I0930 13:20:18.734197 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-76669f99c-n44h7_e5254af8-649d-413f-b146-51c982f48073/kube-rbac-proxy/0.log"
Sep 30 13:20:18 crc kubenswrapper[5002]: I0930 13:20:18.825234 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-76669f99c-n44h7_e5254af8-649d-413f-b146-51c982f48073/manager/0.log"
Sep 30 13:20:26 crc kubenswrapper[5002]: I0930 13:20:26.707447 5002 scope.go:117] "RemoveContainer" containerID="99dfdbc96d28883eef6b198c958bc370158e50fab55ddd68077640f2dc60329b"
Sep 30 13:20:26 crc kubenswrapper[5002]: E0930 13:20:26.708345 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-ncbb5_openshift-machine-config-operator(341a55c6-78d3-4fa2-8f47-b56fd41fa1c1)\"" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" podUID="341a55c6-78d3-4fa2-8f47-b56fd41fa1c1"
Sep 30 13:20:29 crc kubenswrapper[5002]: I0930 13:20:29.023541 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-zcbmk"]
Sep 30 13:20:29 crc kubenswrapper[5002]: E0930 13:20:29.024371 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ddaa44db-15a4-4286-bc58-6e007d69fecf" containerName="container-00"
Sep 30 13:20:29 crc kubenswrapper[5002]: I0930 13:20:29.024388 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="ddaa44db-15a4-4286-bc58-6e007d69fecf" containerName="container-00"
Sep 30 13:20:29 crc kubenswrapper[5002]: I0930 13:20:29.024662 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="ddaa44db-15a4-4286-bc58-6e007d69fecf" containerName="container-00"
Sep 30 13:20:29 crc kubenswrapper[5002]: I0930 13:20:29.026337 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-zcbmk"
Sep 30 13:20:29 crc kubenswrapper[5002]: I0930 13:20:29.040428 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-zcbmk"]
Sep 30 13:20:29 crc kubenswrapper[5002]: I0930 13:20:29.141705 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f62ce328-e2fc-46f3-84a3-bd873b0d9b3f-utilities\") pod \"redhat-marketplace-zcbmk\" (UID: \"f62ce328-e2fc-46f3-84a3-bd873b0d9b3f\") " pod="openshift-marketplace/redhat-marketplace-zcbmk"
Sep 30 13:20:29 crc kubenswrapper[5002]: I0930 13:20:29.141858 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zp4zz\" (UniqueName: \"kubernetes.io/projected/f62ce328-e2fc-46f3-84a3-bd873b0d9b3f-kube-api-access-zp4zz\") pod \"redhat-marketplace-zcbmk\" (UID: \"f62ce328-e2fc-46f3-84a3-bd873b0d9b3f\") " pod="openshift-marketplace/redhat-marketplace-zcbmk"
Sep 30 13:20:29 crc kubenswrapper[5002]: I0930 13:20:29.141912 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f62ce328-e2fc-46f3-84a3-bd873b0d9b3f-catalog-content\") pod \"redhat-marketplace-zcbmk\" (UID: \"f62ce328-e2fc-46f3-84a3-bd873b0d9b3f\") " pod="openshift-marketplace/redhat-marketplace-zcbmk"
Sep 30 13:20:29 crc kubenswrapper[5002]: I0930 13:20:29.243911 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zp4zz\" (UniqueName: \"kubernetes.io/projected/f62ce328-e2fc-46f3-84a3-bd873b0d9b3f-kube-api-access-zp4zz\") pod \"redhat-marketplace-zcbmk\" (UID: \"f62ce328-e2fc-46f3-84a3-bd873b0d9b3f\") " pod="openshift-marketplace/redhat-marketplace-zcbmk"
Sep 30 13:20:29 crc kubenswrapper[5002]: I0930 13:20:29.244026 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f62ce328-e2fc-46f3-84a3-bd873b0d9b3f-catalog-content\") pod \"redhat-marketplace-zcbmk\" (UID: \"f62ce328-e2fc-46f3-84a3-bd873b0d9b3f\") " pod="openshift-marketplace/redhat-marketplace-zcbmk"
Sep 30 13:20:29 crc kubenswrapper[5002]: I0930 13:20:29.244068 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f62ce328-e2fc-46f3-84a3-bd873b0d9b3f-utilities\") pod \"redhat-marketplace-zcbmk\" (UID: \"f62ce328-e2fc-46f3-84a3-bd873b0d9b3f\") " pod="openshift-marketplace/redhat-marketplace-zcbmk"
Sep 30 13:20:29 crc kubenswrapper[5002]: I0930 13:20:29.244681 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f62ce328-e2fc-46f3-84a3-bd873b0d9b3f-catalog-content\") pod \"redhat-marketplace-zcbmk\" (UID: \"f62ce328-e2fc-46f3-84a3-bd873b0d9b3f\") " pod="openshift-marketplace/redhat-marketplace-zcbmk"
Sep 30 13:20:29 crc kubenswrapper[5002]: I0930 13:20:29.244726 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f62ce328-e2fc-46f3-84a3-bd873b0d9b3f-utilities\") pod \"redhat-marketplace-zcbmk\" (UID: \"f62ce328-e2fc-46f3-84a3-bd873b0d9b3f\") " pod="openshift-marketplace/redhat-marketplace-zcbmk"
Sep 30 13:20:29 crc kubenswrapper[5002]: I0930 13:20:29.265520 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zp4zz\" (UniqueName: \"kubernetes.io/projected/f62ce328-e2fc-46f3-84a3-bd873b0d9b3f-kube-api-access-zp4zz\") pod \"redhat-marketplace-zcbmk\" (UID: \"f62ce328-e2fc-46f3-84a3-bd873b0d9b3f\") " pod="openshift-marketplace/redhat-marketplace-zcbmk"
Sep 30 13:20:29 crc kubenswrapper[5002]: I0930 13:20:29.348853 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-zcbmk"
Sep 30 13:20:29 crc kubenswrapper[5002]: I0930 13:20:29.793791 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-zcbmk"]
Sep 30 13:20:30 crc kubenswrapper[5002]: I0930 13:20:30.041981 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-zcbmk" event={"ID":"f62ce328-e2fc-46f3-84a3-bd873b0d9b3f","Type":"ContainerStarted","Data":"5bb19a15dd151fe8026778425e63c3dcb57dff118b4051f7230f8f355742f7c4"}
Sep 30 13:20:30 crc kubenswrapper[5002]: I0930 13:20:30.042279 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-zcbmk" event={"ID":"f62ce328-e2fc-46f3-84a3-bd873b0d9b3f","Type":"ContainerStarted","Data":"b74f5ea10fcbc89008bf3be56de7b36e26a6fd034eecc6347a9299a7d382f0d2"}
Sep 30 13:20:31 crc kubenswrapper[5002]: I0930 13:20:31.053086 5002 generic.go:334] "Generic (PLEG): container finished" podID="f62ce328-e2fc-46f3-84a3-bd873b0d9b3f" containerID="5bb19a15dd151fe8026778425e63c3dcb57dff118b4051f7230f8f355742f7c4" exitCode=0
Sep 30 13:20:31 crc kubenswrapper[5002]: I0930 13:20:31.053143 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-zcbmk" event={"ID":"f62ce328-e2fc-46f3-84a3-bd873b0d9b3f","Type":"ContainerDied","Data":"5bb19a15dd151fe8026778425e63c3dcb57dff118b4051f7230f8f355742f7c4"}
Sep 30 13:20:31 crc kubenswrapper[5002]: I0930 13:20:31.055462 5002 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Sep 30 13:20:32 crc kubenswrapper[5002]: I0930 13:20:32.942424 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_control-plane-machine-set-operator-78cbb6b69f-2pmxc_f27fcf7d-17db-407a-b6ee-e34779332edf/control-plane-machine-set-operator/0.log"
Sep 30 13:20:33 crc kubenswrapper[5002]: I0930 13:20:33.073537 5002 generic.go:334] "Generic (PLEG): container finished" podID="f62ce328-e2fc-46f3-84a3-bd873b0d9b3f" containerID="f59d8718e234cd78a31c9a719c2d0a4ab33b514e814a31c8e03ec986576198df" exitCode=0
Sep 30 13:20:33 crc kubenswrapper[5002]: I0930 13:20:33.073583 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-zcbmk" event={"ID":"f62ce328-e2fc-46f3-84a3-bd873b0d9b3f","Type":"ContainerDied","Data":"f59d8718e234cd78a31c9a719c2d0a4ab33b514e814a31c8e03ec986576198df"}
Sep 30 13:20:33 crc kubenswrapper[5002]: I0930 13:20:33.138429 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-fp74c_ccd47538-6f91-4c6d-91b0-afccf0c83b20/kube-rbac-proxy/0.log"
Sep 30 13:20:33 crc kubenswrapper[5002]: I0930 13:20:33.216874 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-fp74c_ccd47538-6f91-4c6d-91b0-afccf0c83b20/machine-api-operator/0.log"
Sep 30 13:20:34 crc kubenswrapper[5002]: I0930 13:20:34.082530 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-zcbmk" event={"ID":"f62ce328-e2fc-46f3-84a3-bd873b0d9b3f","Type":"ContainerStarted","Data":"bc78504e8e75fa2a52c0da0c79de31c27e01c2dd983a90c2816a90da2362f498"}
Sep 30 13:20:34 crc kubenswrapper[5002]: I0930 13:20:34.108441 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-zcbmk" podStartSLOduration=2.432811425 podStartE2EDuration="5.108419331s" podCreationTimestamp="2025-09-30 13:20:29 +0000 UTC" firstStartedPulling="2025-09-30 13:20:31.055224518 +0000 UTC m=+3605.304906664" lastFinishedPulling="2025-09-30 13:20:33.730832424 +0000 UTC m=+3607.980514570" observedRunningTime="2025-09-30 13:20:34.102219006 +0000 UTC m=+3608.351901162" watchObservedRunningTime="2025-09-30 13:20:34.108419331 +0000 UTC m=+3608.358101477"
Sep 30 13:20:34 crc kubenswrapper[5002]: I0930 13:20:34.822995 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-2htmk"]
Sep 30 13:20:34 crc kubenswrapper[5002]: I0930 13:20:34.824903 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-2htmk"
Sep 30 13:20:34 crc kubenswrapper[5002]: I0930 13:20:34.833596 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-2htmk"]
Sep 30 13:20:34 crc kubenswrapper[5002]: I0930 13:20:34.954791 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b0197db8-9b97-4795-be59-5ed5bc8899b1-utilities\") pod \"community-operators-2htmk\" (UID: \"b0197db8-9b97-4795-be59-5ed5bc8899b1\") " pod="openshift-marketplace/community-operators-2htmk"
Sep 30 13:20:34 crc kubenswrapper[5002]: I0930 13:20:34.954861 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b0197db8-9b97-4795-be59-5ed5bc8899b1-catalog-content\") pod \"community-operators-2htmk\" (UID: \"b0197db8-9b97-4795-be59-5ed5bc8899b1\") " pod="openshift-marketplace/community-operators-2htmk"
Sep 30 13:20:34 crc kubenswrapper[5002]: I0930 13:20:34.955175 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4jgrh\" (UniqueName: \"kubernetes.io/projected/b0197db8-9b97-4795-be59-5ed5bc8899b1-kube-api-access-4jgrh\") pod \"community-operators-2htmk\" (UID: \"b0197db8-9b97-4795-be59-5ed5bc8899b1\") " pod="openshift-marketplace/community-operators-2htmk"
Sep 30 13:20:35 crc kubenswrapper[5002]: I0930 13:20:35.057543 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4jgrh\" (UniqueName: \"kubernetes.io/projected/b0197db8-9b97-4795-be59-5ed5bc8899b1-kube-api-access-4jgrh\") pod \"community-operators-2htmk\" (UID: \"b0197db8-9b97-4795-be59-5ed5bc8899b1\") " pod="openshift-marketplace/community-operators-2htmk"
Sep 30 13:20:35 crc kubenswrapper[5002]: I0930 13:20:35.057664 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b0197db8-9b97-4795-be59-5ed5bc8899b1-utilities\") pod \"community-operators-2htmk\" (UID: \"b0197db8-9b97-4795-be59-5ed5bc8899b1\") " pod="openshift-marketplace/community-operators-2htmk"
Sep 30 13:20:35 crc kubenswrapper[5002]: I0930 13:20:35.057701 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b0197db8-9b97-4795-be59-5ed5bc8899b1-catalog-content\") pod \"community-operators-2htmk\" (UID: \"b0197db8-9b97-4795-be59-5ed5bc8899b1\") " pod="openshift-marketplace/community-operators-2htmk"
Sep 30 13:20:35 crc kubenswrapper[5002]: I0930 13:20:35.058219 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b0197db8-9b97-4795-be59-5ed5bc8899b1-catalog-content\") pod \"community-operators-2htmk\" (UID: \"b0197db8-9b97-4795-be59-5ed5bc8899b1\") " pod="openshift-marketplace/community-operators-2htmk"
Sep 30 13:20:35 crc kubenswrapper[5002]: I0930 13:20:35.058342 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b0197db8-9b97-4795-be59-5ed5bc8899b1-utilities\") pod \"community-operators-2htmk\" (UID: \"b0197db8-9b97-4795-be59-5ed5bc8899b1\") " pod="openshift-marketplace/community-operators-2htmk"
Sep 30 13:20:35 crc kubenswrapper[5002]: I0930 13:20:35.090439 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4jgrh\" (UniqueName: \"kubernetes.io/projected/b0197db8-9b97-4795-be59-5ed5bc8899b1-kube-api-access-4jgrh\") pod \"community-operators-2htmk\" (UID: \"b0197db8-9b97-4795-be59-5ed5bc8899b1\") " pod="openshift-marketplace/community-operators-2htmk"
Sep 30 13:20:35 crc kubenswrapper[5002]: I0930 13:20:35.148677 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-2htmk"
Sep 30 13:20:35 crc kubenswrapper[5002]: I0930 13:20:35.678560 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-2htmk"]
Sep 30 13:20:35 crc kubenswrapper[5002]: W0930 13:20:35.681941 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb0197db8_9b97_4795_be59_5ed5bc8899b1.slice/crio-d60a7f3f6832f10dea542ad37c081cabf995fd3b4223c296feb5343f8e724ea4 WatchSource:0}: Error finding container d60a7f3f6832f10dea542ad37c081cabf995fd3b4223c296feb5343f8e724ea4: Status 404 returned error can't find the container with id d60a7f3f6832f10dea542ad37c081cabf995fd3b4223c296feb5343f8e724ea4
Sep 30 13:20:36 crc kubenswrapper[5002]: I0930 13:20:36.107510 5002 generic.go:334] "Generic (PLEG): container finished" podID="b0197db8-9b97-4795-be59-5ed5bc8899b1" containerID="548bb320433e0011a794c1a341eeb2f5943bac8850cea7c2e7aa5a8551eca8ab" exitCode=0
Sep 30 13:20:36 crc kubenswrapper[5002]: I0930 13:20:36.107612 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2htmk" event={"ID":"b0197db8-9b97-4795-be59-5ed5bc8899b1","Type":"ContainerDied","Data":"548bb320433e0011a794c1a341eeb2f5943bac8850cea7c2e7aa5a8551eca8ab"}
Sep 30 13:20:36 crc kubenswrapper[5002]: I0930 13:20:36.107762 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2htmk" event={"ID":"b0197db8-9b97-4795-be59-5ed5bc8899b1","Type":"ContainerStarted","Data":"d60a7f3f6832f10dea542ad37c081cabf995fd3b4223c296feb5343f8e724ea4"}
Sep 30 13:20:38 crc kubenswrapper[5002]: I0930 13:20:38.128897 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2htmk" event={"ID":"b0197db8-9b97-4795-be59-5ed5bc8899b1","Type":"ContainerStarted","Data":"55874bd60ad07655ee06710b0912a19bef3afb9d5bb60377ff2c830eb9951886"}
Sep 30 13:20:38 crc kubenswrapper[5002]: I0930 13:20:38.676440 5002 scope.go:117] "RemoveContainer" containerID="99dfdbc96d28883eef6b198c958bc370158e50fab55ddd68077640f2dc60329b"
Sep 30 13:20:38 crc kubenswrapper[5002]: E0930 13:20:38.676769 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-ncbb5_openshift-machine-config-operator(341a55c6-78d3-4fa2-8f47-b56fd41fa1c1)\"" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" podUID="341a55c6-78d3-4fa2-8f47-b56fd41fa1c1"
Sep 30 13:20:39 crc kubenswrapper[5002]: I0930 13:20:39.349775 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-zcbmk"
Sep 30 13:20:39 crc kubenswrapper[5002]: I0930 13:20:39.350059 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-zcbmk"
Sep 30 13:20:39 crc kubenswrapper[5002]: I0930 13:20:39.392147 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-zcbmk"
Sep 30 13:20:40 crc kubenswrapper[5002]: I0930 13:20:40.167370 5002 generic.go:334] "Generic (PLEG): container finished" podID="b0197db8-9b97-4795-be59-5ed5bc8899b1" containerID="55874bd60ad07655ee06710b0912a19bef3afb9d5bb60377ff2c830eb9951886" exitCode=0
Sep 30 13:20:40 crc kubenswrapper[5002]: I0930 13:20:40.168185 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2htmk" event={"ID":"b0197db8-9b97-4795-be59-5ed5bc8899b1","Type":"ContainerDied","Data":"55874bd60ad07655ee06710b0912a19bef3afb9d5bb60377ff2c830eb9951886"}
Sep 30 13:20:40 crc kubenswrapper[5002]: I0930 13:20:40.213899 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-zcbmk"
Sep 30 13:20:41 crc kubenswrapper[5002]: I0930 13:20:41.178118 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2htmk" event={"ID":"b0197db8-9b97-4795-be59-5ed5bc8899b1","Type":"ContainerStarted","Data":"a085aace546f65ae83fe9deb3f7aea3ff69c9e0ab01214bb60f70cf8cd26ca8f"}
Sep 30 13:20:41 crc kubenswrapper[5002]: I0930 13:20:41.221486 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-2htmk" podStartSLOduration=2.761798866 podStartE2EDuration="7.221450397s" podCreationTimestamp="2025-09-30 13:20:34 +0000 UTC" firstStartedPulling="2025-09-30 13:20:36.109047191 +0000 UTC m=+3610.358729337" lastFinishedPulling="2025-09-30 13:20:40.568698722 +0000 UTC m=+3614.818380868" observedRunningTime="2025-09-30 13:20:41.218286782 +0000 UTC m=+3615.467968948" watchObservedRunningTime="2025-09-30 13:20:41.221450397 +0000 UTC m=+3615.471132543"
Sep 30 13:20:41 crc kubenswrapper[5002]: I0930 13:20:41.813928 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-zcbmk"]
Sep 30 13:20:42 crc kubenswrapper[5002]: I0930 13:20:42.186066 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-zcbmk" podUID="f62ce328-e2fc-46f3-84a3-bd873b0d9b3f" containerName="registry-server" containerID="cri-o://bc78504e8e75fa2a52c0da0c79de31c27e01c2dd983a90c2816a90da2362f498" gracePeriod=2
Sep 30 13:20:42 crc kubenswrapper[5002]: I0930 13:20:42.660300 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-zcbmk"
Sep 30 13:20:42 crc kubenswrapper[5002]: I0930 13:20:42.806112 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f62ce328-e2fc-46f3-84a3-bd873b0d9b3f-utilities\") pod \"f62ce328-e2fc-46f3-84a3-bd873b0d9b3f\" (UID: \"f62ce328-e2fc-46f3-84a3-bd873b0d9b3f\") "
Sep 30 13:20:42 crc kubenswrapper[5002]: I0930 13:20:42.806262 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f62ce328-e2fc-46f3-84a3-bd873b0d9b3f-catalog-content\") pod \"f62ce328-e2fc-46f3-84a3-bd873b0d9b3f\" (UID: \"f62ce328-e2fc-46f3-84a3-bd873b0d9b3f\") "
Sep 30 13:20:42 crc kubenswrapper[5002]: I0930 13:20:42.806302 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zp4zz\" (UniqueName: \"kubernetes.io/projected/f62ce328-e2fc-46f3-84a3-bd873b0d9b3f-kube-api-access-zp4zz\") pod \"f62ce328-e2fc-46f3-84a3-bd873b0d9b3f\" (UID: \"f62ce328-e2fc-46f3-84a3-bd873b0d9b3f\") "
Sep 30 13:20:42 crc kubenswrapper[5002]: I0930 13:20:42.807058 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f62ce328-e2fc-46f3-84a3-bd873b0d9b3f-utilities" (OuterVolumeSpecName: "utilities") pod "f62ce328-e2fc-46f3-84a3-bd873b0d9b3f" (UID: "f62ce328-e2fc-46f3-84a3-bd873b0d9b3f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 30 13:20:42 crc kubenswrapper[5002]: I0930 13:20:42.812703 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f62ce328-e2fc-46f3-84a3-bd873b0d9b3f-kube-api-access-zp4zz" (OuterVolumeSpecName: "kube-api-access-zp4zz") pod "f62ce328-e2fc-46f3-84a3-bd873b0d9b3f" (UID: "f62ce328-e2fc-46f3-84a3-bd873b0d9b3f"). InnerVolumeSpecName "kube-api-access-zp4zz". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 13:20:42 crc kubenswrapper[5002]: I0930 13:20:42.818704 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f62ce328-e2fc-46f3-84a3-bd873b0d9b3f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "f62ce328-e2fc-46f3-84a3-bd873b0d9b3f" (UID: "f62ce328-e2fc-46f3-84a3-bd873b0d9b3f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 30 13:20:42 crc kubenswrapper[5002]: I0930 13:20:42.908535 5002 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f62ce328-e2fc-46f3-84a3-bd873b0d9b3f-utilities\") on node \"crc\" DevicePath \"\""
Sep 30 13:20:42 crc kubenswrapper[5002]: I0930 13:20:42.908568 5002 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f62ce328-e2fc-46f3-84a3-bd873b0d9b3f-catalog-content\") on node \"crc\" DevicePath \"\""
Sep 30 13:20:42 crc kubenswrapper[5002]: I0930 13:20:42.908581 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zp4zz\" (UniqueName: \"kubernetes.io/projected/f62ce328-e2fc-46f3-84a3-bd873b0d9b3f-kube-api-access-zp4zz\") on node \"crc\" DevicePath \"\""
Sep 30 13:20:43 crc kubenswrapper[5002]: I0930 13:20:43.198447 5002 generic.go:334] "Generic (PLEG): container finished" podID="f62ce328-e2fc-46f3-84a3-bd873b0d9b3f" containerID="bc78504e8e75fa2a52c0da0c79de31c27e01c2dd983a90c2816a90da2362f498" exitCode=0
Sep 30 13:20:43 crc kubenswrapper[5002]: I0930 13:20:43.198515 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-zcbmk" event={"ID":"f62ce328-e2fc-46f3-84a3-bd873b0d9b3f","Type":"ContainerDied","Data":"bc78504e8e75fa2a52c0da0c79de31c27e01c2dd983a90c2816a90da2362f498"}
Sep 30 13:20:43 crc kubenswrapper[5002]: I0930 13:20:43.198736 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-zcbmk" event={"ID":"f62ce328-e2fc-46f3-84a3-bd873b0d9b3f","Type":"ContainerDied","Data":"b74f5ea10fcbc89008bf3be56de7b36e26a6fd034eecc6347a9299a7d382f0d2"}
Sep 30 13:20:43 crc kubenswrapper[5002]: I0930 13:20:43.198757 5002 scope.go:117] "RemoveContainer" containerID="bc78504e8e75fa2a52c0da0c79de31c27e01c2dd983a90c2816a90da2362f498"
Sep 30 13:20:43 crc kubenswrapper[5002]: I0930 13:20:43.198583 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-zcbmk"
Sep 30 13:20:43 crc kubenswrapper[5002]: I0930 13:20:43.220120 5002 scope.go:117] "RemoveContainer" containerID="f59d8718e234cd78a31c9a719c2d0a4ab33b514e814a31c8e03ec986576198df"
Sep 30 13:20:43 crc kubenswrapper[5002]: I0930 13:20:43.247810 5002 scope.go:117] "RemoveContainer" containerID="5bb19a15dd151fe8026778425e63c3dcb57dff118b4051f7230f8f355742f7c4"
Sep 30 13:20:43 crc kubenswrapper[5002]: I0930 13:20:43.253433 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-zcbmk"]
Sep 30 13:20:43 crc kubenswrapper[5002]: I0930 13:20:43.265555 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-zcbmk"]
Sep 30 13:20:43 crc kubenswrapper[5002]: I0930 13:20:43.286272 5002 scope.go:117] "RemoveContainer" containerID="bc78504e8e75fa2a52c0da0c79de31c27e01c2dd983a90c2816a90da2362f498"
Sep 30 13:20:43 crc kubenswrapper[5002]: E0930 13:20:43.286819 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bc78504e8e75fa2a52c0da0c79de31c27e01c2dd983a90c2816a90da2362f498\": container with ID starting with bc78504e8e75fa2a52c0da0c79de31c27e01c2dd983a90c2816a90da2362f498 not found: ID does not exist" containerID="bc78504e8e75fa2a52c0da0c79de31c27e01c2dd983a90c2816a90da2362f498"
Sep 30 13:20:43 crc kubenswrapper[5002]: I0930 13:20:43.286869 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bc78504e8e75fa2a52c0da0c79de31c27e01c2dd983a90c2816a90da2362f498"} err="failed to get container status \"bc78504e8e75fa2a52c0da0c79de31c27e01c2dd983a90c2816a90da2362f498\": rpc error: code = NotFound desc = could not find container \"bc78504e8e75fa2a52c0da0c79de31c27e01c2dd983a90c2816a90da2362f498\": container with ID starting with bc78504e8e75fa2a52c0da0c79de31c27e01c2dd983a90c2816a90da2362f498 not found: ID does not exist"
Sep 30 13:20:43 crc kubenswrapper[5002]: I0930 13:20:43.286898 5002 scope.go:117] "RemoveContainer" containerID="f59d8718e234cd78a31c9a719c2d0a4ab33b514e814a31c8e03ec986576198df"
Sep 30 13:20:43 crc kubenswrapper[5002]: E0930 13:20:43.287216 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f59d8718e234cd78a31c9a719c2d0a4ab33b514e814a31c8e03ec986576198df\": container with ID starting with f59d8718e234cd78a31c9a719c2d0a4ab33b514e814a31c8e03ec986576198df not found: ID does not exist" containerID="f59d8718e234cd78a31c9a719c2d0a4ab33b514e814a31c8e03ec986576198df"
Sep 30 13:20:43 crc kubenswrapper[5002]: I0930 13:20:43.287274 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f59d8718e234cd78a31c9a719c2d0a4ab33b514e814a31c8e03ec986576198df"} err="failed to get container status \"f59d8718e234cd78a31c9a719c2d0a4ab33b514e814a31c8e03ec986576198df\": rpc error: code = NotFound desc = could not find container \"f59d8718e234cd78a31c9a719c2d0a4ab33b514e814a31c8e03ec986576198df\": container with ID starting with f59d8718e234cd78a31c9a719c2d0a4ab33b514e814a31c8e03ec986576198df not found: ID does not exist"
Sep 30 13:20:43 crc kubenswrapper[5002]: I0930 13:20:43.287306 5002 scope.go:117] "RemoveContainer" containerID="5bb19a15dd151fe8026778425e63c3dcb57dff118b4051f7230f8f355742f7c4"
Sep 30 13:20:43 crc kubenswrapper[5002]: E0930 13:20:43.287674 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5bb19a15dd151fe8026778425e63c3dcb57dff118b4051f7230f8f355742f7c4\": container with ID starting with 5bb19a15dd151fe8026778425e63c3dcb57dff118b4051f7230f8f355742f7c4 not found: ID does not exist" containerID="5bb19a15dd151fe8026778425e63c3dcb57dff118b4051f7230f8f355742f7c4"
Sep 30 13:20:43 crc kubenswrapper[5002]: I0930 13:20:43.287700 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5bb19a15dd151fe8026778425e63c3dcb57dff118b4051f7230f8f355742f7c4"} err="failed to get container status \"5bb19a15dd151fe8026778425e63c3dcb57dff118b4051f7230f8f355742f7c4\": rpc error: code = NotFound desc = could not find container \"5bb19a15dd151fe8026778425e63c3dcb57dff118b4051f7230f8f355742f7c4\": container with ID starting with 5bb19a15dd151fe8026778425e63c3dcb57dff118b4051f7230f8f355742f7c4 not found: ID does not exist"
Sep 30 13:20:44 crc kubenswrapper[5002]: I0930 13:20:44.688777 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f62ce328-e2fc-46f3-84a3-bd873b0d9b3f" path="/var/lib/kubelet/pods/f62ce328-e2fc-46f3-84a3-bd873b0d9b3f/volumes"
Sep 30 13:20:45 crc kubenswrapper[5002]: I0930 13:20:45.149912 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-2htmk"
Sep 30 13:20:45 crc kubenswrapper[5002]: I0930 13:20:45.150000 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-2htmk"
Sep 30 13:20:45 crc kubenswrapper[5002]: I0930 13:20:45.209396 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-2htmk"
Sep 30 13:20:45 crc kubenswrapper[5002]: I0930 13:20:45.277961 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-2htmk"
Sep 30 13:20:45 crc kubenswrapper[5002]: I0930 13:20:45.437887 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-5b446d88c5-6z4b2_fa128ff5-c231-478a-8a20-a617f7187459/cert-manager-controller/0.log"
Sep 30 13:20:45 crc kubenswrapper[5002]: I0930 13:20:45.567439 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-cainjector-7f985d654d-974dk_cf984127-3ede-48b0-84a6-aaa1c3c321c1/cert-manager-cainjector/0.log"
Sep 30 13:20:45 crc kubenswrapper[5002]: I0930 13:20:45.646313 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-webhook-5655c58dd6-h79vx_22886b45-e205-4eed-8610-087e217a2f3e/cert-manager-webhook/0.log"
Sep 30 13:20:46 crc kubenswrapper[5002]: I0930 13:20:46.012345 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-2htmk"]
Sep 30 13:20:47 crc kubenswrapper[5002]: I0930 13:20:47.231135 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-2htmk" podUID="b0197db8-9b97-4795-be59-5ed5bc8899b1" containerName="registry-server" containerID="cri-o://a085aace546f65ae83fe9deb3f7aea3ff69c9e0ab01214bb60f70cf8cd26ca8f" gracePeriod=2
Sep 30 13:20:47 crc kubenswrapper[5002]: E0930 13:20:47.293970 5002 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb0197db8_9b97_4795_be59_5ed5bc8899b1.slice/crio-a085aace546f65ae83fe9deb3f7aea3ff69c9e0ab01214bb60f70cf8cd26ca8f.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb0197db8_9b97_4795_be59_5ed5bc8899b1.slice/crio-conmon-a085aace546f65ae83fe9deb3f7aea3ff69c9e0ab01214bb60f70cf8cd26ca8f.scope\": RecentStats: unable to find data in memory cache]"
Sep 30 13:20:47 crc kubenswrapper[5002]: I0930 13:20:47.698825 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-2htmk"
Sep 30 13:20:47 crc kubenswrapper[5002]: I0930 13:20:47.797755 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b0197db8-9b97-4795-be59-5ed5bc8899b1-utilities\") pod \"b0197db8-9b97-4795-be59-5ed5bc8899b1\" (UID: \"b0197db8-9b97-4795-be59-5ed5bc8899b1\") "
Sep 30 13:20:47 crc kubenswrapper[5002]: I0930 13:20:47.798155 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4jgrh\" (UniqueName: \"kubernetes.io/projected/b0197db8-9b97-4795-be59-5ed5bc8899b1-kube-api-access-4jgrh\") pod \"b0197db8-9b97-4795-be59-5ed5bc8899b1\" (UID: \"b0197db8-9b97-4795-be59-5ed5bc8899b1\") "
Sep 30 13:20:47 crc kubenswrapper[5002]: I0930 13:20:47.798305 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b0197db8-9b97-4795-be59-5ed5bc8899b1-catalog-content\") pod \"b0197db8-9b97-4795-be59-5ed5bc8899b1\" (UID: \"b0197db8-9b97-4795-be59-5ed5bc8899b1\") "
Sep 30 13:20:47 crc kubenswrapper[5002]: I0930 13:20:47.798918 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b0197db8-9b97-4795-be59-5ed5bc8899b1-utilities" (OuterVolumeSpecName: "utilities") pod "b0197db8-9b97-4795-be59-5ed5bc8899b1" (UID: "b0197db8-9b97-4795-be59-5ed5bc8899b1"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 30 13:20:47 crc kubenswrapper[5002]: I0930 13:20:47.800172 5002 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b0197db8-9b97-4795-be59-5ed5bc8899b1-utilities\") on node \"crc\" DevicePath \"\""
Sep 30 13:20:47 crc kubenswrapper[5002]: I0930 13:20:47.804031 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b0197db8-9b97-4795-be59-5ed5bc8899b1-kube-api-access-4jgrh" (OuterVolumeSpecName: "kube-api-access-4jgrh") pod "b0197db8-9b97-4795-be59-5ed5bc8899b1" (UID: "b0197db8-9b97-4795-be59-5ed5bc8899b1"). InnerVolumeSpecName "kube-api-access-4jgrh". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 13:20:47 crc kubenswrapper[5002]: I0930 13:20:47.858344 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b0197db8-9b97-4795-be59-5ed5bc8899b1-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b0197db8-9b97-4795-be59-5ed5bc8899b1" (UID: "b0197db8-9b97-4795-be59-5ed5bc8899b1"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 30 13:20:47 crc kubenswrapper[5002]: I0930 13:20:47.902097 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4jgrh\" (UniqueName: \"kubernetes.io/projected/b0197db8-9b97-4795-be59-5ed5bc8899b1-kube-api-access-4jgrh\") on node \"crc\" DevicePath \"\""
Sep 30 13:20:47 crc kubenswrapper[5002]: I0930 13:20:47.902167 5002 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b0197db8-9b97-4795-be59-5ed5bc8899b1-catalog-content\") on node \"crc\" DevicePath \"\""
Sep 30 13:20:48 crc kubenswrapper[5002]: I0930 13:20:48.240567 5002 generic.go:334] "Generic (PLEG): container finished" podID="b0197db8-9b97-4795-be59-5ed5bc8899b1" containerID="a085aace546f65ae83fe9deb3f7aea3ff69c9e0ab01214bb60f70cf8cd26ca8f" exitCode=0
Sep 30 13:20:48 crc kubenswrapper[5002]: I0930 13:20:48.240620 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2htmk" event={"ID":"b0197db8-9b97-4795-be59-5ed5bc8899b1","Type":"ContainerDied","Data":"a085aace546f65ae83fe9deb3f7aea3ff69c9e0ab01214bb60f70cf8cd26ca8f"}
Sep 30 13:20:48 crc kubenswrapper[5002]: I0930 13:20:48.240649 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2htmk" event={"ID":"b0197db8-9b97-4795-be59-5ed5bc8899b1","Type":"ContainerDied","Data":"d60a7f3f6832f10dea542ad37c081cabf995fd3b4223c296feb5343f8e724ea4"}
Sep 30 13:20:48 crc kubenswrapper[5002]: I0930 13:20:48.240666 5002 scope.go:117] "RemoveContainer" containerID="a085aace546f65ae83fe9deb3f7aea3ff69c9e0ab01214bb60f70cf8cd26ca8f"
Sep 30 13:20:48 crc kubenswrapper[5002]: I0930 13:20:48.240675 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-2htmk"
Sep 30 13:20:48 crc kubenswrapper[5002]: I0930 13:20:48.288625 5002 scope.go:117] "RemoveContainer" containerID="55874bd60ad07655ee06710b0912a19bef3afb9d5bb60377ff2c830eb9951886"
Sep 30 13:20:48 crc kubenswrapper[5002]: I0930 13:20:48.290754 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-2htmk"]
Sep 30 13:20:48 crc kubenswrapper[5002]: I0930 13:20:48.301235 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-2htmk"]
Sep 30 13:20:48 crc kubenswrapper[5002]: I0930 13:20:48.312286 5002 scope.go:117] "RemoveContainer" containerID="548bb320433e0011a794c1a341eeb2f5943bac8850cea7c2e7aa5a8551eca8ab"
Sep 30 13:20:48 crc kubenswrapper[5002]: I0930 13:20:48.361720 5002 scope.go:117] "RemoveContainer" containerID="a085aace546f65ae83fe9deb3f7aea3ff69c9e0ab01214bb60f70cf8cd26ca8f"
Sep 30 13:20:48 crc kubenswrapper[5002]: E0930 13:20:48.362169 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a085aace546f65ae83fe9deb3f7aea3ff69c9e0ab01214bb60f70cf8cd26ca8f\": container with ID starting with a085aace546f65ae83fe9deb3f7aea3ff69c9e0ab01214bb60f70cf8cd26ca8f not found: ID does not exist" containerID="a085aace546f65ae83fe9deb3f7aea3ff69c9e0ab01214bb60f70cf8cd26ca8f"
Sep 30 13:20:48 crc kubenswrapper[5002]: I0930 13:20:48.362214 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a085aace546f65ae83fe9deb3f7aea3ff69c9e0ab01214bb60f70cf8cd26ca8f"} err="failed to get container status \"a085aace546f65ae83fe9deb3f7aea3ff69c9e0ab01214bb60f70cf8cd26ca8f\": rpc error: code = NotFound desc = could not find container \"a085aace546f65ae83fe9deb3f7aea3ff69c9e0ab01214bb60f70cf8cd26ca8f\": container with ID starting with a085aace546f65ae83fe9deb3f7aea3ff69c9e0ab01214bb60f70cf8cd26ca8f not found: ID does not exist"
Sep 30 13:20:48 crc kubenswrapper[5002]: I0930 13:20:48.362243 5002 scope.go:117] "RemoveContainer" containerID="55874bd60ad07655ee06710b0912a19bef3afb9d5bb60377ff2c830eb9951886"
Sep 30 13:20:48 crc kubenswrapper[5002]: E0930 13:20:48.362640 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"55874bd60ad07655ee06710b0912a19bef3afb9d5bb60377ff2c830eb9951886\": container with ID starting with 55874bd60ad07655ee06710b0912a19bef3afb9d5bb60377ff2c830eb9951886 not found: ID does not exist" containerID="55874bd60ad07655ee06710b0912a19bef3afb9d5bb60377ff2c830eb9951886"
Sep 30 13:20:48 crc kubenswrapper[5002]: I0930 13:20:48.362677 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"55874bd60ad07655ee06710b0912a19bef3afb9d5bb60377ff2c830eb9951886"} err="failed to get container status \"55874bd60ad07655ee06710b0912a19bef3afb9d5bb60377ff2c830eb9951886\": rpc error: code = NotFound desc = could not find container \"55874bd60ad07655ee06710b0912a19bef3afb9d5bb60377ff2c830eb9951886\": container with ID starting with 55874bd60ad07655ee06710b0912a19bef3afb9d5bb60377ff2c830eb9951886 not found: ID does not exist"
Sep 30 13:20:48 crc kubenswrapper[5002]: I0930 13:20:48.362701 5002 scope.go:117] "RemoveContainer" containerID="548bb320433e0011a794c1a341eeb2f5943bac8850cea7c2e7aa5a8551eca8ab"
Sep 30 13:20:48 crc kubenswrapper[5002]: E0930 13:20:48.362985 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"548bb320433e0011a794c1a341eeb2f5943bac8850cea7c2e7aa5a8551eca8ab\": container with ID starting with 548bb320433e0011a794c1a341eeb2f5943bac8850cea7c2e7aa5a8551eca8ab not found: ID does not exist" containerID="548bb320433e0011a794c1a341eeb2f5943bac8850cea7c2e7aa5a8551eca8ab"
Sep 30 13:20:48 crc kubenswrapper[5002]: I0930 13:20:48.363015 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"548bb320433e0011a794c1a341eeb2f5943bac8850cea7c2e7aa5a8551eca8ab"} err="failed to get container status \"548bb320433e0011a794c1a341eeb2f5943bac8850cea7c2e7aa5a8551eca8ab\": rpc error: code = NotFound desc = could not find container \"548bb320433e0011a794c1a341eeb2f5943bac8850cea7c2e7aa5a8551eca8ab\": container with ID starting with 548bb320433e0011a794c1a341eeb2f5943bac8850cea7c2e7aa5a8551eca8ab not found: ID does not exist"
Sep 30 13:20:48 crc kubenswrapper[5002]: I0930 13:20:48.686290 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b0197db8-9b97-4795-be59-5ed5bc8899b1" path="/var/lib/kubelet/pods/b0197db8-9b97-4795-be59-5ed5bc8899b1/volumes"
Sep 30 13:20:53 crc kubenswrapper[5002]: I0930 13:20:53.676443 5002 scope.go:117] "RemoveContainer" containerID="99dfdbc96d28883eef6b198c958bc370158e50fab55ddd68077640f2dc60329b"
Sep 30 13:20:53 crc kubenswrapper[5002]: E0930 13:20:53.677284 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-ncbb5_openshift-machine-config-operator(341a55c6-78d3-4fa2-8f47-b56fd41fa1c1)\"" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" podUID="341a55c6-78d3-4fa2-8f47-b56fd41fa1c1"
Sep 30 13:20:57 crc kubenswrapper[5002]: I0930 13:20:57.456670 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-console-plugin-864bb6dfb5-p8l92_c350ea0c-aa9c-4ccb-9607-aeff49e295b1/nmstate-console-plugin/0.log"
Sep 30 13:20:57 crc kubenswrapper[5002]: I0930 13:20:57.646058 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-58fcddf996-w7bgk_54cfc79e-c203-4715-a7d4-0120f7577db6/kube-rbac-proxy/0.log"
Sep 30 13:20:57 crc kubenswrapper[5002]: I0930 13:20:57.652457 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-handler-4s6bl_b3b353e1-b31e-45ba-b22e-6e78fd291203/nmstate-handler/0.log"
Sep 30 13:20:57 crc kubenswrapper[5002]: I0930 13:20:57.688629 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-58fcddf996-w7bgk_54cfc79e-c203-4715-a7d4-0120f7577db6/nmstate-metrics/0.log"
Sep 30 13:20:57 crc kubenswrapper[5002]: I0930 13:20:57.863201 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-operator-5d6f6cfd66-45rf2_f5de2a34-58db-4513-a544-baac53a6ee7f/nmstate-operator/0.log"
Sep 30 13:20:57 crc kubenswrapper[5002]: I0930 13:20:57.891253 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-webhook-6d689559c5-5q5mt_7a89ecb4-494a-40f2-8e0c-871b2c94f8a2/nmstate-webhook/0.log"
Sep 30 13:21:04 crc kubenswrapper[5002]: I0930 13:21:04.676619 5002 scope.go:117] "RemoveContainer" containerID="99dfdbc96d28883eef6b198c958bc370158e50fab55ddd68077640f2dc60329b"
Sep 30 13:21:04 crc kubenswrapper[5002]: E0930 13:21:04.677288 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-ncbb5_openshift-machine-config-operator(341a55c6-78d3-4fa2-8f47-b56fd41fa1c1)\"" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" podUID="341a55c6-78d3-4fa2-8f47-b56fd41fa1c1"
Sep 30 13:21:10 crc kubenswrapper[5002]: I0930 13:21:10.426092 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-5d688f5ffc-4hv4v_9381f980-e2fe-4cf6-97ad-93757413f357/kube-rbac-proxy/0.log"
Sep 30 13:21:10 crc kubenswrapper[5002]: I0930 13:21:10.523490 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-5d688f5ffc-4hv4v_9381f980-e2fe-4cf6-97ad-93757413f357/controller/0.log"
Sep 30 13:21:10 crc kubenswrapper[5002]: I0930 13:21:10.629053 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-fxsmq_66fde16c-f197-4399-bef6-7ea1d7d41611/cp-frr-files/0.log"
Sep 30 13:21:10 crc kubenswrapper[5002]: I0930 13:21:10.796055 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-fxsmq_66fde16c-f197-4399-bef6-7ea1d7d41611/cp-reloader/0.log"
Sep 30 13:21:10 crc kubenswrapper[5002]: I0930 13:21:10.797664 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-fxsmq_66fde16c-f197-4399-bef6-7ea1d7d41611/cp-frr-files/0.log"
Sep 30 13:21:10 crc kubenswrapper[5002]: I0930 13:21:10.829922 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-fxsmq_66fde16c-f197-4399-bef6-7ea1d7d41611/cp-reloader/0.log"
Sep 30 13:21:10 crc kubenswrapper[5002]: I0930 13:21:10.838614 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-fxsmq_66fde16c-f197-4399-bef6-7ea1d7d41611/cp-metrics/0.log"
Sep 30 13:21:11 crc kubenswrapper[5002]: I0930 13:21:11.003398 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-fxsmq_66fde16c-f197-4399-bef6-7ea1d7d41611/cp-metrics/0.log"
Sep 30 13:21:11 crc kubenswrapper[5002]: I0930 13:21:11.005424 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-fxsmq_66fde16c-f197-4399-bef6-7ea1d7d41611/cp-metrics/0.log"
Sep 30 13:21:11 crc kubenswrapper[5002]: I0930 13:21:11.017868 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-fxsmq_66fde16c-f197-4399-bef6-7ea1d7d41611/cp-reloader/0.log"
Sep 30 13:21:11 crc kubenswrapper[5002]: I0930 13:21:11.018305 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-fxsmq_66fde16c-f197-4399-bef6-7ea1d7d41611/cp-frr-files/0.log"
Sep 30 13:21:11 crc kubenswrapper[5002]: I0930 13:21:11.169598 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-fxsmq_66fde16c-f197-4399-bef6-7ea1d7d41611/cp-frr-files/0.log"
Sep 30 13:21:11 crc kubenswrapper[5002]: I0930 13:21:11.178017 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-fxsmq_66fde16c-f197-4399-bef6-7ea1d7d41611/cp-reloader/0.log"
Sep 30 13:21:11 crc kubenswrapper[5002]: I0930 13:21:11.202710 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-fxsmq_66fde16c-f197-4399-bef6-7ea1d7d41611/cp-metrics/0.log"
Sep 30 13:21:11 crc kubenswrapper[5002]: I0930 13:21:11.225243 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-fxsmq_66fde16c-f197-4399-bef6-7ea1d7d41611/controller/0.log"
Sep 30 13:21:11 crc kubenswrapper[5002]: I0930 13:21:11.376276 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-fxsmq_66fde16c-f197-4399-bef6-7ea1d7d41611/kube-rbac-proxy/0.log"
Sep 30 13:21:11 crc kubenswrapper[5002]: I0930 13:21:11.388191 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-fxsmq_66fde16c-f197-4399-bef6-7ea1d7d41611/frr-metrics/0.log"
Sep 30 13:21:11 crc kubenswrapper[5002]: I0930 13:21:11.410017 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-fxsmq_66fde16c-f197-4399-bef6-7ea1d7d41611/kube-rbac-proxy-frr/0.log"
Sep 30 13:21:11 crc kubenswrapper[5002]: I0930 13:21:11.593113 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-fxsmq_66fde16c-f197-4399-bef6-7ea1d7d41611/reloader/0.log"
Sep 30 13:21:11 crc kubenswrapper[5002]: I0930 13:21:11.640429 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-webhook-server-5478bdb765-j499w_94a678f1-7504-4246-9ce4-23886b1a3623/frr-k8s-webhook-server/0.log"
Sep 30 13:21:11 crc kubenswrapper[5002]: I0930 13:21:11.867240 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-controller-manager-576d687654-bw9lz_27c28a83-93f3-40e5-9430-e95593fb9b70/manager/0.log"
Sep 30 13:21:12 crc kubenswrapper[5002]: I0930 13:21:12.045806 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-webhook-server-6dd5844d8b-5jxxh_6e455bb5-7fde-4e6e-a287-e053991325b2/webhook-server/0.log"
Sep 30 13:21:12 crc kubenswrapper[5002]: I0930 13:21:12.065363 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-tq2q9_bae6d903-934d-4f98-9924-805cc9b20d5c/kube-rbac-proxy/0.log"
Sep 30 13:21:12 crc kubenswrapper[5002]: I0930 13:21:12.727350 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-tq2q9_bae6d903-934d-4f98-9924-805cc9b20d5c/speaker/0.log"
Sep 30 13:21:12 crc kubenswrapper[5002]: I0930 13:21:12.845284 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-fxsmq_66fde16c-f197-4399-bef6-7ea1d7d41611/frr/0.log"
Sep 30 13:21:16 crc kubenswrapper[5002]: I0930 13:21:16.688135 5002 scope.go:117] "RemoveContainer" containerID="99dfdbc96d28883eef6b198c958bc370158e50fab55ddd68077640f2dc60329b"
Sep 30 13:21:16 crc kubenswrapper[5002]: E0930 13:21:16.689026 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-ncbb5_openshift-machine-config-operator(341a55c6-78d3-4fa2-8f47-b56fd41fa1c1)\"" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" podUID="341a55c6-78d3-4fa2-8f47-b56fd41fa1c1"
Sep 30 13:21:23 crc kubenswrapper[5002]: I0930 13:21:23.926005 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcstdhg_8eafa11f-bdad-4035-82be-1fe0e27a0282/util/0.log"
Sep 30 13:21:24 crc kubenswrapper[5002]: I0930 13:21:24.148833 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcstdhg_8eafa11f-bdad-4035-82be-1fe0e27a0282/pull/0.log"
Sep 30 13:21:24 crc kubenswrapper[5002]: I0930 13:21:24.149031 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcstdhg_8eafa11f-bdad-4035-82be-1fe0e27a0282/pull/0.log"
Sep 30 13:21:24 crc kubenswrapper[5002]: I0930 13:21:24.169084 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcstdhg_8eafa11f-bdad-4035-82be-1fe0e27a0282/util/0.log"
Sep 30 13:21:24 crc kubenswrapper[5002]: I0930 13:21:24.298452 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcstdhg_8eafa11f-bdad-4035-82be-1fe0e27a0282/util/0.log"
Sep 30 13:21:24 crc kubenswrapper[5002]: I0930 13:21:24.334712 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcstdhg_8eafa11f-bdad-4035-82be-1fe0e27a0282/pull/0.log"
Sep 30 13:21:24 crc kubenswrapper[5002]: I0930 13:21:24.336154 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcstdhg_8eafa11f-bdad-4035-82be-1fe0e27a0282/extract/0.log"
Sep 30 13:21:24 crc kubenswrapper[5002]: I0930 13:21:24.502484 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-p4m6q_122cb276-c38c-4c29-80f9-b9e225b0a5a6/extract-utilities/0.log"
Sep 30 13:21:24 crc kubenswrapper[5002]: I0930 13:21:24.694299 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-p4m6q_122cb276-c38c-4c29-80f9-b9e225b0a5a6/extract-content/0.log"
Sep 30 13:21:24 crc kubenswrapper[5002]: I0930 13:21:24.703616 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-p4m6q_122cb276-c38c-4c29-80f9-b9e225b0a5a6/extract-content/0.log"
Sep 30 13:21:24 crc kubenswrapper[5002]: I0930 13:21:24.705442 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-p4m6q_122cb276-c38c-4c29-80f9-b9e225b0a5a6/extract-utilities/0.log"
Sep 30 13:21:24 crc kubenswrapper[5002]: I0930 13:21:24.868128 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-p4m6q_122cb276-c38c-4c29-80f9-b9e225b0a5a6/extract-content/0.log"
Sep 30 13:21:24 crc kubenswrapper[5002]: I0930 13:21:24.899260 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-p4m6q_122cb276-c38c-4c29-80f9-b9e225b0a5a6/extract-utilities/0.log"
Sep 30 13:21:25 crc kubenswrapper[5002]: I0930 13:21:25.080409 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-rqfbc_d184b411-2932-4ace-a20a-b81cdfec713a/extract-utilities/0.log"
Sep 30 13:21:25 crc kubenswrapper[5002]: I0930 13:21:25.391945 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-p4m6q_122cb276-c38c-4c29-80f9-b9e225b0a5a6/registry-server/0.log"
Sep 30 13:21:25 crc kubenswrapper[5002]: I0930 13:21:25.394982 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-rqfbc_d184b411-2932-4ace-a20a-b81cdfec713a/extract-utilities/0.log"
Sep 30 13:21:25 crc kubenswrapper[5002]: I0930 13:21:25.440741 5002 log.go:25] "Finished parsing log file"
path="/var/log/pods/openshift-marketplace_community-operators-rqfbc_d184b411-2932-4ace-a20a-b81cdfec713a/extract-content/0.log" Sep 30 13:21:25 crc kubenswrapper[5002]: I0930 13:21:25.476434 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-rqfbc_d184b411-2932-4ace-a20a-b81cdfec713a/extract-content/0.log" Sep 30 13:21:25 crc kubenswrapper[5002]: I0930 13:21:25.585085 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-rqfbc_d184b411-2932-4ace-a20a-b81cdfec713a/extract-utilities/0.log" Sep 30 13:21:25 crc kubenswrapper[5002]: I0930 13:21:25.594073 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-rqfbc_d184b411-2932-4ace-a20a-b81cdfec713a/extract-content/0.log" Sep 30 13:21:25 crc kubenswrapper[5002]: I0930 13:21:25.844860 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96bqfq5_9da270d3-58ec-44e6-acaa-3cb86fbc2047/util/0.log" Sep 30 13:21:25 crc kubenswrapper[5002]: I0930 13:21:25.860515 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-rqfbc_d184b411-2932-4ace-a20a-b81cdfec713a/registry-server/0.log" Sep 30 13:21:26 crc kubenswrapper[5002]: I0930 13:21:26.007827 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96bqfq5_9da270d3-58ec-44e6-acaa-3cb86fbc2047/util/0.log" Sep 30 13:21:26 crc kubenswrapper[5002]: I0930 13:21:26.024495 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96bqfq5_9da270d3-58ec-44e6-acaa-3cb86fbc2047/pull/0.log" Sep 30 13:21:26 crc kubenswrapper[5002]: I0930 13:21:26.024562 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96bqfq5_9da270d3-58ec-44e6-acaa-3cb86fbc2047/pull/0.log" Sep 30 13:21:26 crc kubenswrapper[5002]: I0930 13:21:26.191225 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96bqfq5_9da270d3-58ec-44e6-acaa-3cb86fbc2047/pull/0.log" Sep 30 13:21:26 crc kubenswrapper[5002]: I0930 13:21:26.203015 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96bqfq5_9da270d3-58ec-44e6-acaa-3cb86fbc2047/util/0.log" Sep 30 13:21:26 crc kubenswrapper[5002]: I0930 13:21:26.208846 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96bqfq5_9da270d3-58ec-44e6-acaa-3cb86fbc2047/extract/0.log" Sep 30 13:21:26 crc kubenswrapper[5002]: I0930 13:21:26.381772 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-hrqrc_75fd37d6-6c7a-4c39-8ed0-c9ba6caa2b7f/marketplace-operator/0.log" Sep 30 13:21:26 crc kubenswrapper[5002]: I0930 13:21:26.385119 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-kzj7j_68797de5-40f1-448f-9fbb-fa3eb4adc842/extract-utilities/0.log" Sep 30 13:21:26 crc kubenswrapper[5002]: I0930 13:21:26.558138 5002 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_redhat-marketplace-kzj7j_68797de5-40f1-448f-9fbb-fa3eb4adc842/extract-content/0.log" Sep 30 13:21:26 crc kubenswrapper[5002]: I0930 13:21:26.574556 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-kzj7j_68797de5-40f1-448f-9fbb-fa3eb4adc842/extract-utilities/0.log" Sep 30 13:21:26 crc kubenswrapper[5002]: I0930 13:21:26.581564 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-kzj7j_68797de5-40f1-448f-9fbb-fa3eb4adc842/extract-content/0.log" Sep 30 13:21:26 crc kubenswrapper[5002]: I0930 13:21:26.815845 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-kzj7j_68797de5-40f1-448f-9fbb-fa3eb4adc842/extract-content/0.log" Sep 30 13:21:26 crc kubenswrapper[5002]: I0930 13:21:26.815928 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-kzj7j_68797de5-40f1-448f-9fbb-fa3eb4adc842/extract-utilities/0.log" Sep 30 13:21:26 crc kubenswrapper[5002]: I0930 13:21:26.961537 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-kzj7j_68797de5-40f1-448f-9fbb-fa3eb4adc842/registry-server/0.log" Sep 30 13:21:27 crc kubenswrapper[5002]: I0930 13:21:27.059345 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-twns5_dc331b2c-354f-4cb2-9aab-2f8328781341/extract-utilities/0.log" Sep 30 13:21:27 crc kubenswrapper[5002]: I0930 13:21:27.197410 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-twns5_dc331b2c-354f-4cb2-9aab-2f8328781341/extract-utilities/0.log" Sep 30 13:21:27 crc kubenswrapper[5002]: I0930 13:21:27.219701 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-twns5_dc331b2c-354f-4cb2-9aab-2f8328781341/extract-content/0.log" Sep 30 13:21:27 crc kubenswrapper[5002]: I0930 13:21:27.227993 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-twns5_dc331b2c-354f-4cb2-9aab-2f8328781341/extract-content/0.log" Sep 30 13:21:27 crc kubenswrapper[5002]: I0930 13:21:27.433180 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-twns5_dc331b2c-354f-4cb2-9aab-2f8328781341/extract-utilities/0.log" Sep 30 13:21:27 crc kubenswrapper[5002]: I0930 13:21:27.455390 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-twns5_dc331b2c-354f-4cb2-9aab-2f8328781341/extract-content/0.log" Sep 30 13:21:27 crc kubenswrapper[5002]: I0930 13:21:27.675671 5002 scope.go:117] "RemoveContainer" containerID="99dfdbc96d28883eef6b198c958bc370158e50fab55ddd68077640f2dc60329b" Sep 30 13:21:27 crc kubenswrapper[5002]: E0930 13:21:27.675906 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-ncbb5_openshift-machine-config-operator(341a55c6-78d3-4fa2-8f47-b56fd41fa1c1)\"" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" podUID="341a55c6-78d3-4fa2-8f47-b56fd41fa1c1" Sep 30 13:21:28 crc kubenswrapper[5002]: I0930 13:21:28.013166 5002 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_redhat-operators-twns5_dc331b2c-354f-4cb2-9aab-2f8328781341/registry-server/0.log" Sep 30 13:21:38 crc kubenswrapper[5002]: I0930 13:21:38.676768 5002 scope.go:117] "RemoveContainer" containerID="99dfdbc96d28883eef6b198c958bc370158e50fab55ddd68077640f2dc60329b" Sep 30 13:21:38 crc kubenswrapper[5002]: E0930 13:21:38.677599 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-ncbb5_openshift-machine-config-operator(341a55c6-78d3-4fa2-8f47-b56fd41fa1c1)\"" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" podUID="341a55c6-78d3-4fa2-8f47-b56fd41fa1c1" Sep 30 13:21:52 crc kubenswrapper[5002]: I0930 13:21:52.679819 5002 scope.go:117] "RemoveContainer" containerID="99dfdbc96d28883eef6b198c958bc370158e50fab55ddd68077640f2dc60329b" Sep 30 13:21:52 crc kubenswrapper[5002]: E0930 13:21:52.680638 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-ncbb5_openshift-machine-config-operator(341a55c6-78d3-4fa2-8f47-b56fd41fa1c1)\"" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" podUID="341a55c6-78d3-4fa2-8f47-b56fd41fa1c1" Sep 30 13:22:06 crc kubenswrapper[5002]: I0930 13:22:06.684653 5002 scope.go:117] "RemoveContainer" containerID="99dfdbc96d28883eef6b198c958bc370158e50fab55ddd68077640f2dc60329b" Sep 30 13:22:06 crc kubenswrapper[5002]: E0930 13:22:06.685413 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-ncbb5_openshift-machine-config-operator(341a55c6-78d3-4fa2-8f47-b56fd41fa1c1)\"" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" podUID="341a55c6-78d3-4fa2-8f47-b56fd41fa1c1" Sep 30 13:22:18 crc kubenswrapper[5002]: I0930 13:22:18.676400 5002 scope.go:117] "RemoveContainer" containerID="99dfdbc96d28883eef6b198c958bc370158e50fab55ddd68077640f2dc60329b" Sep 30 13:22:18 crc kubenswrapper[5002]: E0930 13:22:18.677731 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-ncbb5_openshift-machine-config-operator(341a55c6-78d3-4fa2-8f47-b56fd41fa1c1)\"" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" podUID="341a55c6-78d3-4fa2-8f47-b56fd41fa1c1" Sep 30 13:22:31 crc kubenswrapper[5002]: I0930 13:22:31.677298 5002 scope.go:117] "RemoveContainer" containerID="99dfdbc96d28883eef6b198c958bc370158e50fab55ddd68077640f2dc60329b" Sep 30 13:22:31 crc kubenswrapper[5002]: E0930 13:22:31.678255 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-ncbb5_openshift-machine-config-operator(341a55c6-78d3-4fa2-8f47-b56fd41fa1c1)\"" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" podUID="341a55c6-78d3-4fa2-8f47-b56fd41fa1c1" Sep 30 13:22:46 crc kubenswrapper[5002]: I0930 13:22:46.689582 5002 
scope.go:117] "RemoveContainer" containerID="99dfdbc96d28883eef6b198c958bc370158e50fab55ddd68077640f2dc60329b" Sep 30 13:22:46 crc kubenswrapper[5002]: E0930 13:22:46.690786 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-ncbb5_openshift-machine-config-operator(341a55c6-78d3-4fa2-8f47-b56fd41fa1c1)\"" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" podUID="341a55c6-78d3-4fa2-8f47-b56fd41fa1c1" Sep 30 13:22:58 crc kubenswrapper[5002]: I0930 13:22:58.678378 5002 scope.go:117] "RemoveContainer" containerID="99dfdbc96d28883eef6b198c958bc370158e50fab55ddd68077640f2dc60329b" Sep 30 13:22:58 crc kubenswrapper[5002]: E0930 13:22:58.679265 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-ncbb5_openshift-machine-config-operator(341a55c6-78d3-4fa2-8f47-b56fd41fa1c1)\"" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" podUID="341a55c6-78d3-4fa2-8f47-b56fd41fa1c1" Sep 30 13:23:13 crc kubenswrapper[5002]: I0930 13:23:13.676079 5002 scope.go:117] "RemoveContainer" containerID="99dfdbc96d28883eef6b198c958bc370158e50fab55ddd68077640f2dc60329b" Sep 30 13:23:13 crc kubenswrapper[5002]: E0930 13:23:13.677044 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-ncbb5_openshift-machine-config-operator(341a55c6-78d3-4fa2-8f47-b56fd41fa1c1)\"" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" podUID="341a55c6-78d3-4fa2-8f47-b56fd41fa1c1" Sep 30 13:23:23 crc kubenswrapper[5002]: I0930 13:23:23.748213 5002 generic.go:334] "Generic (PLEG): container finished" podID="27d1ca3b-9ebd-4455-972d-11f395adb2c7" containerID="901e8c96f367fde5db58e1d9a45ae121840574075d6496bbf8f8dd0cf9e9f4ce" exitCode=0 Sep 30 13:23:23 crc kubenswrapper[5002]: I0930 13:23:23.748357 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-4ppnm/must-gather-f26t8" event={"ID":"27d1ca3b-9ebd-4455-972d-11f395adb2c7","Type":"ContainerDied","Data":"901e8c96f367fde5db58e1d9a45ae121840574075d6496bbf8f8dd0cf9e9f4ce"} Sep 30 13:23:23 crc kubenswrapper[5002]: I0930 13:23:23.749467 5002 scope.go:117] "RemoveContainer" containerID="901e8c96f367fde5db58e1d9a45ae121840574075d6496bbf8f8dd0cf9e9f4ce" Sep 30 13:23:24 crc kubenswrapper[5002]: I0930 13:23:24.630401 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-4ppnm_must-gather-f26t8_27d1ca3b-9ebd-4455-972d-11f395adb2c7/gather/0.log" Sep 30 13:23:28 crc kubenswrapper[5002]: I0930 13:23:28.676813 5002 scope.go:117] "RemoveContainer" containerID="99dfdbc96d28883eef6b198c958bc370158e50fab55ddd68077640f2dc60329b" Sep 30 13:23:28 crc kubenswrapper[5002]: E0930 13:23:28.677631 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-ncbb5_openshift-machine-config-operator(341a55c6-78d3-4fa2-8f47-b56fd41fa1c1)\"" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" 
podUID="341a55c6-78d3-4fa2-8f47-b56fd41fa1c1" Sep 30 13:23:33 crc kubenswrapper[5002]: I0930 13:23:33.758251 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-4ppnm/must-gather-f26t8"] Sep 30 13:23:33 crc kubenswrapper[5002]: I0930 13:23:33.759168 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-must-gather-4ppnm/must-gather-f26t8" podUID="27d1ca3b-9ebd-4455-972d-11f395adb2c7" containerName="copy" containerID="cri-o://ba809f0e6cb72d539b4eb13abc5edc183dfb9f0701c0f704e748bcdff41085e6" gracePeriod=2 Sep 30 13:23:33 crc kubenswrapper[5002]: I0930 13:23:33.775001 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-4ppnm/must-gather-f26t8"] Sep 30 13:23:34 crc kubenswrapper[5002]: I0930 13:23:34.706159 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-4ppnm_must-gather-f26t8_27d1ca3b-9ebd-4455-972d-11f395adb2c7/copy/0.log" Sep 30 13:23:34 crc kubenswrapper[5002]: I0930 13:23:34.707114 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-4ppnm/must-gather-f26t8" Sep 30 13:23:34 crc kubenswrapper[5002]: I0930 13:23:34.794043 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zxln5\" (UniqueName: \"kubernetes.io/projected/27d1ca3b-9ebd-4455-972d-11f395adb2c7-kube-api-access-zxln5\") pod \"27d1ca3b-9ebd-4455-972d-11f395adb2c7\" (UID: \"27d1ca3b-9ebd-4455-972d-11f395adb2c7\") " Sep 30 13:23:34 crc kubenswrapper[5002]: I0930 13:23:34.794138 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/27d1ca3b-9ebd-4455-972d-11f395adb2c7-must-gather-output\") pod \"27d1ca3b-9ebd-4455-972d-11f395adb2c7\" (UID: \"27d1ca3b-9ebd-4455-972d-11f395adb2c7\") " Sep 30 13:23:34 crc kubenswrapper[5002]: I0930 13:23:34.800436 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/27d1ca3b-9ebd-4455-972d-11f395adb2c7-kube-api-access-zxln5" (OuterVolumeSpecName: "kube-api-access-zxln5") pod "27d1ca3b-9ebd-4455-972d-11f395adb2c7" (UID: "27d1ca3b-9ebd-4455-972d-11f395adb2c7"). InnerVolumeSpecName "kube-api-access-zxln5". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 13:23:34 crc kubenswrapper[5002]: I0930 13:23:34.867274 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-4ppnm_must-gather-f26t8_27d1ca3b-9ebd-4455-972d-11f395adb2c7/copy/0.log" Sep 30 13:23:34 crc kubenswrapper[5002]: I0930 13:23:34.868948 5002 generic.go:334] "Generic (PLEG): container finished" podID="27d1ca3b-9ebd-4455-972d-11f395adb2c7" containerID="ba809f0e6cb72d539b4eb13abc5edc183dfb9f0701c0f704e748bcdff41085e6" exitCode=143 Sep 30 13:23:34 crc kubenswrapper[5002]: I0930 13:23:34.869026 5002 scope.go:117] "RemoveContainer" containerID="ba809f0e6cb72d539b4eb13abc5edc183dfb9f0701c0f704e748bcdff41085e6" Sep 30 13:23:34 crc kubenswrapper[5002]: I0930 13:23:34.869269 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-4ppnm/must-gather-f26t8" Sep 30 13:23:34 crc kubenswrapper[5002]: I0930 13:23:34.891648 5002 scope.go:117] "RemoveContainer" containerID="901e8c96f367fde5db58e1d9a45ae121840574075d6496bbf8f8dd0cf9e9f4ce" Sep 30 13:23:34 crc kubenswrapper[5002]: I0930 13:23:34.896832 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zxln5\" (UniqueName: \"kubernetes.io/projected/27d1ca3b-9ebd-4455-972d-11f395adb2c7-kube-api-access-zxln5\") on node \"crc\" DevicePath \"\"" Sep 30 13:23:34 crc kubenswrapper[5002]: I0930 13:23:34.955568 5002 scope.go:117] "RemoveContainer" containerID="ba809f0e6cb72d539b4eb13abc5edc183dfb9f0701c0f704e748bcdff41085e6" Sep 30 13:23:34 crc kubenswrapper[5002]: E0930 13:23:34.956053 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ba809f0e6cb72d539b4eb13abc5edc183dfb9f0701c0f704e748bcdff41085e6\": container with ID starting with ba809f0e6cb72d539b4eb13abc5edc183dfb9f0701c0f704e748bcdff41085e6 not found: ID does not exist" containerID="ba809f0e6cb72d539b4eb13abc5edc183dfb9f0701c0f704e748bcdff41085e6" Sep 30 13:23:34 crc kubenswrapper[5002]: I0930 13:23:34.956152 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ba809f0e6cb72d539b4eb13abc5edc183dfb9f0701c0f704e748bcdff41085e6"} err="failed to get container status \"ba809f0e6cb72d539b4eb13abc5edc183dfb9f0701c0f704e748bcdff41085e6\": rpc error: code = NotFound desc = could not find container \"ba809f0e6cb72d539b4eb13abc5edc183dfb9f0701c0f704e748bcdff41085e6\": container with ID starting with ba809f0e6cb72d539b4eb13abc5edc183dfb9f0701c0f704e748bcdff41085e6 not found: ID does not exist" Sep 30 13:23:34 crc kubenswrapper[5002]: I0930 13:23:34.956187 5002 scope.go:117] "RemoveContainer" containerID="901e8c96f367fde5db58e1d9a45ae121840574075d6496bbf8f8dd0cf9e9f4ce" Sep 30 13:23:34 crc kubenswrapper[5002]: I0930 13:23:34.956105 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/27d1ca3b-9ebd-4455-972d-11f395adb2c7-must-gather-output" (OuterVolumeSpecName: "must-gather-output") pod "27d1ca3b-9ebd-4455-972d-11f395adb2c7" (UID: "27d1ca3b-9ebd-4455-972d-11f395adb2c7"). InnerVolumeSpecName "must-gather-output". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 13:23:34 crc kubenswrapper[5002]: E0930 13:23:34.956673 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"901e8c96f367fde5db58e1d9a45ae121840574075d6496bbf8f8dd0cf9e9f4ce\": container with ID starting with 901e8c96f367fde5db58e1d9a45ae121840574075d6496bbf8f8dd0cf9e9f4ce not found: ID does not exist" containerID="901e8c96f367fde5db58e1d9a45ae121840574075d6496bbf8f8dd0cf9e9f4ce" Sep 30 13:23:34 crc kubenswrapper[5002]: I0930 13:23:34.956702 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"901e8c96f367fde5db58e1d9a45ae121840574075d6496bbf8f8dd0cf9e9f4ce"} err="failed to get container status \"901e8c96f367fde5db58e1d9a45ae121840574075d6496bbf8f8dd0cf9e9f4ce\": rpc error: code = NotFound desc = could not find container \"901e8c96f367fde5db58e1d9a45ae121840574075d6496bbf8f8dd0cf9e9f4ce\": container with ID starting with 901e8c96f367fde5db58e1d9a45ae121840574075d6496bbf8f8dd0cf9e9f4ce not found: ID does not exist" Sep 30 13:23:34 crc kubenswrapper[5002]: I0930 13:23:34.998695 5002 reconciler_common.go:293] "Volume detached for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/27d1ca3b-9ebd-4455-972d-11f395adb2c7-must-gather-output\") on node \"crc\" DevicePath \"\"" Sep 30 13:23:36 crc kubenswrapper[5002]: I0930 13:23:36.687390 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="27d1ca3b-9ebd-4455-972d-11f395adb2c7" path="/var/lib/kubelet/pods/27d1ca3b-9ebd-4455-972d-11f395adb2c7/volumes" Sep 30 13:23:43 crc kubenswrapper[5002]: I0930 13:23:43.675887 5002 scope.go:117] "RemoveContainer" containerID="99dfdbc96d28883eef6b198c958bc370158e50fab55ddd68077640f2dc60329b" Sep 30 13:23:43 crc kubenswrapper[5002]: E0930 13:23:43.676708 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-ncbb5_openshift-machine-config-operator(341a55c6-78d3-4fa2-8f47-b56fd41fa1c1)\"" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" podUID="341a55c6-78d3-4fa2-8f47-b56fd41fa1c1" Sep 30 13:23:58 crc kubenswrapper[5002]: I0930 13:23:58.676593 5002 scope.go:117] "RemoveContainer" containerID="99dfdbc96d28883eef6b198c958bc370158e50fab55ddd68077640f2dc60329b" Sep 30 13:23:58 crc kubenswrapper[5002]: E0930 13:23:58.677652 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-ncbb5_openshift-machine-config-operator(341a55c6-78d3-4fa2-8f47-b56fd41fa1c1)\"" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" podUID="341a55c6-78d3-4fa2-8f47-b56fd41fa1c1" Sep 30 13:24:01 crc kubenswrapper[5002]: I0930 13:24:01.543532 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-smlkm/must-gather-hdp54"] Sep 30 13:24:01 crc kubenswrapper[5002]: E0930 13:24:01.544193 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f62ce328-e2fc-46f3-84a3-bd873b0d9b3f" containerName="registry-server" Sep 30 13:24:01 crc kubenswrapper[5002]: I0930 13:24:01.544206 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="f62ce328-e2fc-46f3-84a3-bd873b0d9b3f" containerName="registry-server" Sep 
30 13:24:01 crc kubenswrapper[5002]: E0930 13:24:01.544224 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b0197db8-9b97-4795-be59-5ed5bc8899b1" containerName="extract-content" Sep 30 13:24:01 crc kubenswrapper[5002]: I0930 13:24:01.544230 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="b0197db8-9b97-4795-be59-5ed5bc8899b1" containerName="extract-content" Sep 30 13:24:01 crc kubenswrapper[5002]: E0930 13:24:01.544247 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f62ce328-e2fc-46f3-84a3-bd873b0d9b3f" containerName="extract-content" Sep 30 13:24:01 crc kubenswrapper[5002]: I0930 13:24:01.544254 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="f62ce328-e2fc-46f3-84a3-bd873b0d9b3f" containerName="extract-content" Sep 30 13:24:01 crc kubenswrapper[5002]: E0930 13:24:01.544266 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="27d1ca3b-9ebd-4455-972d-11f395adb2c7" containerName="gather" Sep 30 13:24:01 crc kubenswrapper[5002]: I0930 13:24:01.544274 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="27d1ca3b-9ebd-4455-972d-11f395adb2c7" containerName="gather" Sep 30 13:24:01 crc kubenswrapper[5002]: E0930 13:24:01.544286 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="27d1ca3b-9ebd-4455-972d-11f395adb2c7" containerName="copy" Sep 30 13:24:01 crc kubenswrapper[5002]: I0930 13:24:01.544292 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="27d1ca3b-9ebd-4455-972d-11f395adb2c7" containerName="copy" Sep 30 13:24:01 crc kubenswrapper[5002]: E0930 13:24:01.544304 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b0197db8-9b97-4795-be59-5ed5bc8899b1" containerName="extract-utilities" Sep 30 13:24:01 crc kubenswrapper[5002]: I0930 13:24:01.544310 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="b0197db8-9b97-4795-be59-5ed5bc8899b1" containerName="extract-utilities" Sep 30 13:24:01 crc kubenswrapper[5002]: E0930 13:24:01.544321 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f62ce328-e2fc-46f3-84a3-bd873b0d9b3f" containerName="extract-utilities" Sep 30 13:24:01 crc kubenswrapper[5002]: I0930 13:24:01.544326 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="f62ce328-e2fc-46f3-84a3-bd873b0d9b3f" containerName="extract-utilities" Sep 30 13:24:01 crc kubenswrapper[5002]: E0930 13:24:01.544343 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b0197db8-9b97-4795-be59-5ed5bc8899b1" containerName="registry-server" Sep 30 13:24:01 crc kubenswrapper[5002]: I0930 13:24:01.544348 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="b0197db8-9b97-4795-be59-5ed5bc8899b1" containerName="registry-server" Sep 30 13:24:01 crc kubenswrapper[5002]: I0930 13:24:01.544563 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="27d1ca3b-9ebd-4455-972d-11f395adb2c7" containerName="copy" Sep 30 13:24:01 crc kubenswrapper[5002]: I0930 13:24:01.544576 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="b0197db8-9b97-4795-be59-5ed5bc8899b1" containerName="registry-server" Sep 30 13:24:01 crc kubenswrapper[5002]: I0930 13:24:01.544587 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="27d1ca3b-9ebd-4455-972d-11f395adb2c7" containerName="gather" Sep 30 13:24:01 crc kubenswrapper[5002]: I0930 13:24:01.544595 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="f62ce328-e2fc-46f3-84a3-bd873b0d9b3f" containerName="registry-server" Sep 30 13:24:01 crc 
kubenswrapper[5002]: I0930 13:24:01.545728 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-smlkm/must-gather-hdp54" Sep 30 13:24:01 crc kubenswrapper[5002]: I0930 13:24:01.548322 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-smlkm"/"kube-root-ca.crt" Sep 30 13:24:01 crc kubenswrapper[5002]: I0930 13:24:01.550240 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-smlkm"/"openshift-service-ca.crt" Sep 30 13:24:01 crc kubenswrapper[5002]: I0930 13:24:01.567001 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-smlkm/must-gather-hdp54"] Sep 30 13:24:01 crc kubenswrapper[5002]: I0930 13:24:01.724747 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/36c403d4-7bcb-4916-b08d-fcf0f1f8470a-must-gather-output\") pod \"must-gather-hdp54\" (UID: \"36c403d4-7bcb-4916-b08d-fcf0f1f8470a\") " pod="openshift-must-gather-smlkm/must-gather-hdp54" Sep 30 13:24:01 crc kubenswrapper[5002]: I0930 13:24:01.724825 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kcmq9\" (UniqueName: \"kubernetes.io/projected/36c403d4-7bcb-4916-b08d-fcf0f1f8470a-kube-api-access-kcmq9\") pod \"must-gather-hdp54\" (UID: \"36c403d4-7bcb-4916-b08d-fcf0f1f8470a\") " pod="openshift-must-gather-smlkm/must-gather-hdp54" Sep 30 13:24:01 crc kubenswrapper[5002]: I0930 13:24:01.826741 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/36c403d4-7bcb-4916-b08d-fcf0f1f8470a-must-gather-output\") pod \"must-gather-hdp54\" (UID: \"36c403d4-7bcb-4916-b08d-fcf0f1f8470a\") " pod="openshift-must-gather-smlkm/must-gather-hdp54" Sep 30 13:24:01 crc kubenswrapper[5002]: I0930 13:24:01.826928 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kcmq9\" (UniqueName: \"kubernetes.io/projected/36c403d4-7bcb-4916-b08d-fcf0f1f8470a-kube-api-access-kcmq9\") pod \"must-gather-hdp54\" (UID: \"36c403d4-7bcb-4916-b08d-fcf0f1f8470a\") " pod="openshift-must-gather-smlkm/must-gather-hdp54" Sep 30 13:24:01 crc kubenswrapper[5002]: I0930 13:24:01.827522 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/36c403d4-7bcb-4916-b08d-fcf0f1f8470a-must-gather-output\") pod \"must-gather-hdp54\" (UID: \"36c403d4-7bcb-4916-b08d-fcf0f1f8470a\") " pod="openshift-must-gather-smlkm/must-gather-hdp54" Sep 30 13:24:01 crc kubenswrapper[5002]: I0930 13:24:01.850872 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kcmq9\" (UniqueName: \"kubernetes.io/projected/36c403d4-7bcb-4916-b08d-fcf0f1f8470a-kube-api-access-kcmq9\") pod \"must-gather-hdp54\" (UID: \"36c403d4-7bcb-4916-b08d-fcf0f1f8470a\") " pod="openshift-must-gather-smlkm/must-gather-hdp54" Sep 30 13:24:01 crc kubenswrapper[5002]: I0930 13:24:01.863132 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-smlkm/must-gather-hdp54" Sep 30 13:24:02 crc kubenswrapper[5002]: I0930 13:24:02.312387 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-smlkm/must-gather-hdp54"] Sep 30 13:24:03 crc kubenswrapper[5002]: I0930 13:24:03.133567 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-smlkm/must-gather-hdp54" event={"ID":"36c403d4-7bcb-4916-b08d-fcf0f1f8470a","Type":"ContainerStarted","Data":"e29a59ce9fd9b7e11d33799ae3dd4e0581d9f83156f2e07f1e353a4a6208682e"} Sep 30 13:24:03 crc kubenswrapper[5002]: I0930 13:24:03.134187 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-smlkm/must-gather-hdp54" event={"ID":"36c403d4-7bcb-4916-b08d-fcf0f1f8470a","Type":"ContainerStarted","Data":"3bca0e61e11f809246a2dac1fb80f1bae7c10615930592fb16a1e54d4430522a"} Sep 30 13:24:03 crc kubenswrapper[5002]: I0930 13:24:03.134205 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-smlkm/must-gather-hdp54" event={"ID":"36c403d4-7bcb-4916-b08d-fcf0f1f8470a","Type":"ContainerStarted","Data":"f3c7665f306ff4486305bf9b68e215d838dcd89e68c366c7e2add771be9dfc58"} Sep 30 13:24:03 crc kubenswrapper[5002]: I0930 13:24:03.161852 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-smlkm/must-gather-hdp54" podStartSLOduration=2.161836104 podStartE2EDuration="2.161836104s" podCreationTimestamp="2025-09-30 13:24:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 13:24:03.150077915 +0000 UTC m=+3817.399760061" watchObservedRunningTime="2025-09-30 13:24:03.161836104 +0000 UTC m=+3817.411518250" Sep 30 13:24:05 crc kubenswrapper[5002]: I0930 13:24:05.988728 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-smlkm/crc-debug-x5j2l"] Sep 30 13:24:05 crc kubenswrapper[5002]: I0930 13:24:05.990568 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-smlkm/crc-debug-x5j2l" Sep 30 13:24:05 crc kubenswrapper[5002]: I0930 13:24:05.992328 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-smlkm"/"default-dockercfg-fk85c" Sep 30 13:24:06 crc kubenswrapper[5002]: I0930 13:24:06.114901 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w9gmt\" (UniqueName: \"kubernetes.io/projected/4038ded3-a825-4803-951d-9e57ee43f00c-kube-api-access-w9gmt\") pod \"crc-debug-x5j2l\" (UID: \"4038ded3-a825-4803-951d-9e57ee43f00c\") " pod="openshift-must-gather-smlkm/crc-debug-x5j2l" Sep 30 13:24:06 crc kubenswrapper[5002]: I0930 13:24:06.114947 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/4038ded3-a825-4803-951d-9e57ee43f00c-host\") pod \"crc-debug-x5j2l\" (UID: \"4038ded3-a825-4803-951d-9e57ee43f00c\") " pod="openshift-must-gather-smlkm/crc-debug-x5j2l" Sep 30 13:24:06 crc kubenswrapper[5002]: I0930 13:24:06.216982 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w9gmt\" (UniqueName: \"kubernetes.io/projected/4038ded3-a825-4803-951d-9e57ee43f00c-kube-api-access-w9gmt\") pod \"crc-debug-x5j2l\" (UID: \"4038ded3-a825-4803-951d-9e57ee43f00c\") " pod="openshift-must-gather-smlkm/crc-debug-x5j2l" Sep 30 13:24:06 crc kubenswrapper[5002]: I0930 13:24:06.217031 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/4038ded3-a825-4803-951d-9e57ee43f00c-host\") pod \"crc-debug-x5j2l\" (UID: \"4038ded3-a825-4803-951d-9e57ee43f00c\") " pod="openshift-must-gather-smlkm/crc-debug-x5j2l" Sep 30 13:24:06 crc kubenswrapper[5002]: I0930 13:24:06.217230 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/4038ded3-a825-4803-951d-9e57ee43f00c-host\") pod \"crc-debug-x5j2l\" (UID: \"4038ded3-a825-4803-951d-9e57ee43f00c\") " pod="openshift-must-gather-smlkm/crc-debug-x5j2l" Sep 30 13:24:06 crc kubenswrapper[5002]: I0930 13:24:06.238288 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w9gmt\" (UniqueName: \"kubernetes.io/projected/4038ded3-a825-4803-951d-9e57ee43f00c-kube-api-access-w9gmt\") pod \"crc-debug-x5j2l\" (UID: \"4038ded3-a825-4803-951d-9e57ee43f00c\") " pod="openshift-must-gather-smlkm/crc-debug-x5j2l" Sep 30 13:24:06 crc kubenswrapper[5002]: I0930 13:24:06.310049 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-smlkm/crc-debug-x5j2l" Sep 30 13:24:06 crc kubenswrapper[5002]: W0930 13:24:06.344865 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4038ded3_a825_4803_951d_9e57ee43f00c.slice/crio-158b177e24bbccbdcdbc9a92aaf8c29538721a9deb2f4234f1e7ec0c623c93b0 WatchSource:0}: Error finding container 158b177e24bbccbdcdbc9a92aaf8c29538721a9deb2f4234f1e7ec0c623c93b0: Status 404 returned error can't find the container with id 158b177e24bbccbdcdbc9a92aaf8c29538721a9deb2f4234f1e7ec0c623c93b0 Sep 30 13:24:07 crc kubenswrapper[5002]: I0930 13:24:07.172821 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-smlkm/crc-debug-x5j2l" event={"ID":"4038ded3-a825-4803-951d-9e57ee43f00c","Type":"ContainerStarted","Data":"65f3b243c81108de2aff460e6f96d90af24fc09cc9f23c07a8b06261dceafb7d"} Sep 30 13:24:07 crc kubenswrapper[5002]: I0930 13:24:07.173411 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-smlkm/crc-debug-x5j2l" event={"ID":"4038ded3-a825-4803-951d-9e57ee43f00c","Type":"ContainerStarted","Data":"158b177e24bbccbdcdbc9a92aaf8c29538721a9deb2f4234f1e7ec0c623c93b0"} Sep 30 13:24:10 crc kubenswrapper[5002]: I0930 13:24:10.676493 5002 scope.go:117] "RemoveContainer" containerID="99dfdbc96d28883eef6b198c958bc370158e50fab55ddd68077640f2dc60329b" Sep 30 13:24:10 crc kubenswrapper[5002]: E0930 13:24:10.677149 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-ncbb5_openshift-machine-config-operator(341a55c6-78d3-4fa2-8f47-b56fd41fa1c1)\"" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" podUID="341a55c6-78d3-4fa2-8f47-b56fd41fa1c1" Sep 30 13:24:23 crc kubenswrapper[5002]: I0930 13:24:23.676289 5002 scope.go:117] "RemoveContainer" containerID="99dfdbc96d28883eef6b198c958bc370158e50fab55ddd68077640f2dc60329b" Sep 30 13:24:23 crc kubenswrapper[5002]: E0930 13:24:23.677074 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-ncbb5_openshift-machine-config-operator(341a55c6-78d3-4fa2-8f47-b56fd41fa1c1)\"" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" podUID="341a55c6-78d3-4fa2-8f47-b56fd41fa1c1" Sep 30 13:24:37 crc kubenswrapper[5002]: I0930 13:24:37.675991 5002 scope.go:117] "RemoveContainer" containerID="99dfdbc96d28883eef6b198c958bc370158e50fab55ddd68077640f2dc60329b" Sep 30 13:24:38 crc kubenswrapper[5002]: I0930 13:24:38.455097 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" event={"ID":"341a55c6-78d3-4fa2-8f47-b56fd41fa1c1","Type":"ContainerStarted","Data":"804478c07c3268d7d7f250189ddbad5a2e4a5479ddd419784cb5f98b480546c7"} Sep 30 13:24:38 crc kubenswrapper[5002]: I0930 13:24:38.477009 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-smlkm/crc-debug-x5j2l" podStartSLOduration=33.476991042 podStartE2EDuration="33.476991042s" podCreationTimestamp="2025-09-30 13:24:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" 
observedRunningTime="2025-09-30 13:24:07.18768479 +0000 UTC m=+3821.437366926" watchObservedRunningTime="2025-09-30 13:24:38.476991042 +0000 UTC m=+3852.726673188" Sep 30 13:25:01 crc kubenswrapper[5002]: I0930 13:25:01.648024 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-858c4cb9d6-g5ts6_ed88e034-9e24-4611-9d19-90530ff3f7b1/barbican-api-log/0.log" Sep 30 13:25:01 crc kubenswrapper[5002]: I0930 13:25:01.685267 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-858c4cb9d6-g5ts6_ed88e034-9e24-4611-9d19-90530ff3f7b1/barbican-api/0.log" Sep 30 13:25:01 crc kubenswrapper[5002]: I0930 13:25:01.843937 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-64bf5676fd-f8zrr_fc432228-1ae9-4a76-a81f-a8a7d2d44492/barbican-keystone-listener/0.log" Sep 30 13:25:01 crc kubenswrapper[5002]: I0930 13:25:01.901724 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-64bf5676fd-f8zrr_fc432228-1ae9-4a76-a81f-a8a7d2d44492/barbican-keystone-listener-log/0.log" Sep 30 13:25:02 crc kubenswrapper[5002]: I0930 13:25:02.005021 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-5976c988bc-j7gh5_295327d8-7973-4826-a8ab-34dcf2f4b5d5/barbican-worker/0.log" Sep 30 13:25:02 crc kubenswrapper[5002]: I0930 13:25:02.053508 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-5976c988bc-j7gh5_295327d8-7973-4826-a8ab-34dcf2f4b5d5/barbican-worker-log/0.log" Sep 30 13:25:02 crc kubenswrapper[5002]: I0930 13:25:02.267367 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_bootstrap-edpm-deployment-openstack-edpm-ipam-xzr9z_016126fa-8541-4424-b217-acf7d88e5680/bootstrap-edpm-deployment-openstack-edpm-ipam/0.log" Sep 30 13:25:02 crc kubenswrapper[5002]: I0930 13:25:02.431368 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_85c8795c-0a51-4d53-a20d-b0b96d217d93/ceilometer-central-agent/0.log" Sep 30 13:25:02 crc kubenswrapper[5002]: I0930 13:25:02.482270 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_85c8795c-0a51-4d53-a20d-b0b96d217d93/proxy-httpd/0.log" Sep 30 13:25:02 crc kubenswrapper[5002]: I0930 13:25:02.494249 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_85c8795c-0a51-4d53-a20d-b0b96d217d93/ceilometer-notification-agent/0.log" Sep 30 13:25:02 crc kubenswrapper[5002]: I0930 13:25:02.631202 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_85c8795c-0a51-4d53-a20d-b0b96d217d93/sg-core/0.log" Sep 30 13:25:02 crc kubenswrapper[5002]: I0930 13:25:02.721197 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_f71d6351-0ed1-4047-bffa-cc1020e38ecf/cinder-api/0.log" Sep 30 13:25:02 crc kubenswrapper[5002]: I0930 13:25:02.810595 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_f71d6351-0ed1-4047-bffa-cc1020e38ecf/cinder-api-log/0.log" Sep 30 13:25:02 crc kubenswrapper[5002]: I0930 13:25:02.932398 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_9504c15f-ff11-4255-9371-b0481f692c0b/cinder-scheduler/0.log" Sep 30 13:25:03 crc kubenswrapper[5002]: I0930 13:25:03.038530 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_9504c15f-ff11-4255-9371-b0481f692c0b/probe/0.log" 
Sep 30 13:25:03 crc kubenswrapper[5002]: I0930 13:25:03.131111 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-network-edpm-deployment-openstack-edpm-ipam-p69m6_0b171fad-8a7f-4271-b1e6-43b03111244d/configure-network-edpm-deployment-openstack-edpm-ipam/0.log" Sep 30 13:25:03 crc kubenswrapper[5002]: I0930 13:25:03.320925 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-os-edpm-deployment-openstack-edpm-ipam-gwfhm_36ca4049-6444-4a67-b607-b15095a3dabf/configure-os-edpm-deployment-openstack-edpm-ipam/0.log" Sep 30 13:25:03 crc kubenswrapper[5002]: I0930 13:25:03.463431 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-os-edpm-deployment-openstack-edpm-ipam-tp5lv_f4f8c21f-af18-49de-8a07-140d16e9785f/configure-os-edpm-deployment-openstack-edpm-ipam/0.log" Sep 30 13:25:03 crc kubenswrapper[5002]: I0930 13:25:03.561041 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-cb6ffcf87-kzgd6_8ef8ec7f-169f-494e-b17d-4206c144d4f3/init/0.log" Sep 30 13:25:03 crc kubenswrapper[5002]: I0930 13:25:03.759344 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-cb6ffcf87-kzgd6_8ef8ec7f-169f-494e-b17d-4206c144d4f3/init/0.log" Sep 30 13:25:03 crc kubenswrapper[5002]: I0930 13:25:03.812591 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-cb6ffcf87-kzgd6_8ef8ec7f-169f-494e-b17d-4206c144d4f3/dnsmasq-dns/0.log" Sep 30 13:25:04 crc kubenswrapper[5002]: I0930 13:25:04.056690 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_download-cache-edpm-deployment-openstack-edpm-ipam-lpdr2_5389ead8-63bd-4216-9653-48519fa391fb/download-cache-edpm-deployment-openstack-edpm-ipam/0.log" Sep 30 13:25:04 crc kubenswrapper[5002]: I0930 13:25:04.085958 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_b5a818b5-30cc-4c21-b7c4-7563b49832eb/glance-httpd/0.log" Sep 30 13:25:04 crc kubenswrapper[5002]: I0930 13:25:04.248184 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_b5a818b5-30cc-4c21-b7c4-7563b49832eb/glance-log/0.log" Sep 30 13:25:04 crc kubenswrapper[5002]: I0930 13:25:04.275843 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_17156d80-5354-430c-a7f9-294bae55a11c/glance-httpd/0.log" Sep 30 13:25:04 crc kubenswrapper[5002]: I0930 13:25:04.415402 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_17156d80-5354-430c-a7f9-294bae55a11c/glance-log/0.log" Sep 30 13:25:04 crc kubenswrapper[5002]: I0930 13:25:04.566334 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-66c799f4f6-jprmr_7c12a4dd-a3df-4106-ab48-b628b89b3277/horizon/0.log" Sep 30 13:25:04 crc kubenswrapper[5002]: I0930 13:25:04.784289 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-certs-edpm-deployment-openstack-edpm-ipam-7zdzw_61c70258-1787-4522-810a-af2ac9e07703/install-certs-edpm-deployment-openstack-edpm-ipam/0.log" Sep 30 13:25:04 crc kubenswrapper[5002]: I0930 13:25:04.991611 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-66c799f4f6-jprmr_7c12a4dd-a3df-4106-ab48-b628b89b3277/horizon-log/0.log" Sep 30 13:25:05 crc kubenswrapper[5002]: I0930 13:25:05.000985 5002 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_install-os-edpm-deployment-openstack-edpm-ipam-dpzsf_8ebc7091-7fed-4943-9bff-8d1d9ab3db90/install-os-edpm-deployment-openstack-edpm-ipam/0.log" Sep 30 13:25:05 crc kubenswrapper[5002]: I0930 13:25:05.154678 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-cron-29320621-l2g2n_321c3fc2-62d9-46a2-99b3-d1bc4e7e534c/keystone-cron/0.log" Sep 30 13:25:05 crc kubenswrapper[5002]: I0930 13:25:05.347177 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-84489c98f8-p85zv_1c943245-6e36-4812-9694-48a5c2747a90/keystone-api/0.log" Sep 30 13:25:05 crc kubenswrapper[5002]: I0930 13:25:05.357181 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_kube-state-metrics-0_16e6726b-1f13-4bd9-a6a0-326e726dd86a/kube-state-metrics/0.log" Sep 30 13:25:05 crc kubenswrapper[5002]: I0930 13:25:05.420685 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_libvirt-edpm-deployment-openstack-edpm-ipam-lzd9v_7a8b7e27-6872-47a1-b564-9a288ac7cef0/libvirt-edpm-deployment-openstack-edpm-ipam/0.log" Sep 30 13:25:05 crc kubenswrapper[5002]: I0930 13:25:05.833530 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-5c946cd5ff-n7x9t_1df12b89-f778-45b2-b39a-c95700262b6e/neutron-httpd/0.log" Sep 30 13:25:05 crc kubenswrapper[5002]: I0930 13:25:05.847281 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-5c946cd5ff-n7x9t_1df12b89-f778-45b2-b39a-c95700262b6e/neutron-api/0.log" Sep 30 13:25:06 crc kubenswrapper[5002]: I0930 13:25:06.019557 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-metadata-edpm-deployment-openstack-edpm-ipam-tzptr_afe17fe9-74c6-442a-a2c3-70958d7a706b/neutron-metadata-edpm-deployment-openstack-edpm-ipam/0.log" Sep 30 13:25:06 crc kubenswrapper[5002]: I0930 13:25:06.536154 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_822ff6cf-9987-40ed-929e-615b255cc532/nova-api-log/0.log" Sep 30 13:25:06 crc kubenswrapper[5002]: I0930 13:25:06.760407 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell0-conductor-0_972b29bd-a22c-486e-ac29-6d075c3b26a7/nova-cell0-conductor-conductor/0.log" Sep 30 13:25:06 crc kubenswrapper[5002]: I0930 13:25:06.790950 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_822ff6cf-9987-40ed-929e-615b255cc532/nova-api-api/0.log" Sep 30 13:25:07 crc kubenswrapper[5002]: I0930 13:25:07.193694 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-novncproxy-0_18ab92e5-cde3-4728-9782-42025fa3d6b4/nova-cell1-novncproxy-novncproxy/0.log" Sep 30 13:25:07 crc kubenswrapper[5002]: I0930 13:25:07.202998 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-conductor-0_b6734a59-b52d-4116-bd32-31431b949757/nova-cell1-conductor-conductor/0.log" Sep 30 13:25:07 crc kubenswrapper[5002]: I0930 13:25:07.446017 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-edpm-deployment-openstack-edpm-ipam-txpxh_d81299d2-2b37-4c3f-b313-d02d7b33045c/nova-edpm-deployment-openstack-edpm-ipam/0.log" Sep 30 13:25:07 crc kubenswrapper[5002]: I0930 13:25:07.524940 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_cc36d9f0-c2f5-463f-8f7b-3824c9bd9629/nova-metadata-log/0.log" Sep 30 13:25:07 crc kubenswrapper[5002]: I0930 13:25:07.951129 5002 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_nova-scheduler-0_dd4640b7-43e4-4029-88fa-bb9c4a293794/nova-scheduler-scheduler/0.log" Sep 30 13:25:08 crc kubenswrapper[5002]: I0930 13:25:08.062276 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_e109d20d-1925-4779-bbff-50bd39214d34/mysql-bootstrap/0.log" Sep 30 13:25:08 crc kubenswrapper[5002]: I0930 13:25:08.196768 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_e109d20d-1925-4779-bbff-50bd39214d34/mysql-bootstrap/0.log" Sep 30 13:25:08 crc kubenswrapper[5002]: I0930 13:25:08.249348 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_e109d20d-1925-4779-bbff-50bd39214d34/galera/0.log" Sep 30 13:25:08 crc kubenswrapper[5002]: I0930 13:25:08.484947 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_9d5d9337-f90f-4576-94dc-805d4e653801/mysql-bootstrap/0.log" Sep 30 13:25:08 crc kubenswrapper[5002]: I0930 13:25:08.645070 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_9d5d9337-f90f-4576-94dc-805d4e653801/mysql-bootstrap/0.log" Sep 30 13:25:08 crc kubenswrapper[5002]: I0930 13:25:08.725605 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_9d5d9337-f90f-4576-94dc-805d4e653801/galera/0.log" Sep 30 13:25:08 crc kubenswrapper[5002]: I0930 13:25:08.888546 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstackclient_369a872a-8dd2-409e-9938-2a01cd707dc8/openstackclient/0.log" Sep 30 13:25:09 crc kubenswrapper[5002]: I0930 13:25:09.030278 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_cc36d9f0-c2f5-463f-8f7b-3824c9bd9629/nova-metadata-metadata/0.log" Sep 30 13:25:09 crc kubenswrapper[5002]: I0930 13:25:09.082558 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-metrics-qztz9_1a0f9c34-465f-4f75-af75-71b2e2d3722a/openstack-network-exporter/0.log" Sep 30 13:25:09 crc kubenswrapper[5002]: I0930 13:25:09.290161 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-5m55z_9d2f8d72-e223-4f96-917e-7b47baba18d8/ovsdb-server-init/0.log" Sep 30 13:25:09 crc kubenswrapper[5002]: I0930 13:25:09.451242 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-5m55z_9d2f8d72-e223-4f96-917e-7b47baba18d8/ovsdb-server-init/0.log" Sep 30 13:25:09 crc kubenswrapper[5002]: I0930 13:25:09.466401 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-5m55z_9d2f8d72-e223-4f96-917e-7b47baba18d8/ovs-vswitchd/0.log" Sep 30 13:25:09 crc kubenswrapper[5002]: I0930 13:25:09.480203 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-5m55z_9d2f8d72-e223-4f96-917e-7b47baba18d8/ovsdb-server/0.log" Sep 30 13:25:09 crc kubenswrapper[5002]: I0930 13:25:09.620311 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-qq5zn_6b699340-4bbd-4df4-951b-9404b0545d24/ovn-controller/0.log" Sep 30 13:25:09 crc kubenswrapper[5002]: I0930 13:25:09.853928 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-edpm-deployment-openstack-edpm-ipam-4rc8x_71bf5034-6600-47c8-ad11-2855276e1356/ovn-edpm-deployment-openstack-edpm-ipam/0.log" Sep 30 13:25:09 crc kubenswrapper[5002]: I0930 13:25:09.940178 5002 log.go:25] "Finished parsing log 
file" path="/var/log/pods/openstack_ovn-northd-0_39ee2def-85c4-4070-9392-1f4d9fc2139c/openstack-network-exporter/0.log" Sep 30 13:25:10 crc kubenswrapper[5002]: I0930 13:25:10.040089 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_39ee2def-85c4-4070-9392-1f4d9fc2139c/ovn-northd/0.log" Sep 30 13:25:10 crc kubenswrapper[5002]: I0930 13:25:10.170300 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_1e148ee1-66bc-4300-a27a-a8c4ce515d51/openstack-network-exporter/0.log" Sep 30 13:25:10 crc kubenswrapper[5002]: I0930 13:25:10.251012 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_1e148ee1-66bc-4300-a27a-a8c4ce515d51/ovsdbserver-nb/0.log" Sep 30 13:25:10 crc kubenswrapper[5002]: I0930 13:25:10.360233 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_7304f60c-c750-409f-bca4-4fd12c239891/openstack-network-exporter/0.log" Sep 30 13:25:10 crc kubenswrapper[5002]: I0930 13:25:10.433031 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_7304f60c-c750-409f-bca4-4fd12c239891/ovsdbserver-sb/0.log" Sep 30 13:25:10 crc kubenswrapper[5002]: I0930 13:25:10.668438 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-6f8969955b-64x4f_c0d09801-8aec-455d-8e90-cad2e5f5a04e/placement-api/0.log" Sep 30 13:25:10 crc kubenswrapper[5002]: I0930 13:25:10.774586 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-6f8969955b-64x4f_c0d09801-8aec-455d-8e90-cad2e5f5a04e/placement-log/0.log" Sep 30 13:25:10 crc kubenswrapper[5002]: I0930 13:25:10.850962 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_4069e130-8a6c-4bf6-9885-b8e35857e519/setup-container/0.log" Sep 30 13:25:11 crc kubenswrapper[5002]: I0930 13:25:11.052142 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_4069e130-8a6c-4bf6-9885-b8e35857e519/setup-container/0.log" Sep 30 13:25:11 crc kubenswrapper[5002]: I0930 13:25:11.052716 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_4069e130-8a6c-4bf6-9885-b8e35857e519/rabbitmq/0.log" Sep 30 13:25:11 crc kubenswrapper[5002]: I0930 13:25:11.248805 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_9f3cd025-3ba6-453b-9224-ee63cf57890c/setup-container/0.log" Sep 30 13:25:11 crc kubenswrapper[5002]: I0930 13:25:11.429813 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_9f3cd025-3ba6-453b-9224-ee63cf57890c/rabbitmq/0.log" Sep 30 13:25:11 crc kubenswrapper[5002]: I0930 13:25:11.439160 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_9f3cd025-3ba6-453b-9224-ee63cf57890c/setup-container/0.log" Sep 30 13:25:11 crc kubenswrapper[5002]: I0930 13:25:11.647497 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_reboot-os-edpm-deployment-openstack-edpm-ipam-p8db7_d6667a58-0566-4c47-8516-b46bed2a0f65/reboot-os-edpm-deployment-openstack-edpm-ipam/0.log" Sep 30 13:25:11 crc kubenswrapper[5002]: I0930 13:25:11.685591 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_redhat-edpm-deployment-openstack-edpm-ipam-mqcxz_ad03eb09-92a2-4d00-9290-8b142d71fea6/redhat-edpm-deployment-openstack-edpm-ipam/0.log" Sep 30 13:25:11 crc kubenswrapper[5002]: I0930 13:25:11.849862 
5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_repo-setup-edpm-deployment-openstack-edpm-ipam-vswq9_67886def-b13b-463d-a4f6-3a0d13fa9580/repo-setup-edpm-deployment-openstack-edpm-ipam/0.log" Sep 30 13:25:12 crc kubenswrapper[5002]: I0930 13:25:12.020700 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_run-os-edpm-deployment-openstack-edpm-ipam-f2n6c_0d552fc4-7e51-426b-99ff-9ed8753f4178/run-os-edpm-deployment-openstack-edpm-ipam/0.log" Sep 30 13:25:12 crc kubenswrapper[5002]: I0930 13:25:12.127241 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ssh-known-hosts-edpm-deployment-w2tmz_13e7455b-ca1c-475f-95af-c9813c0876f7/ssh-known-hosts-edpm-deployment/0.log" Sep 30 13:25:12 crc kubenswrapper[5002]: I0930 13:25:12.463251 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-85654c5dc5-xmznd_c487a894-05f0-4ed3-9b0a-fc5bfbae3f74/proxy-server/0.log" Sep 30 13:25:12 crc kubenswrapper[5002]: I0930 13:25:12.498021 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-85654c5dc5-xmznd_c487a894-05f0-4ed3-9b0a-fc5bfbae3f74/proxy-httpd/0.log" Sep 30 13:25:12 crc kubenswrapper[5002]: I0930 13:25:12.644829 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-ring-rebalance-hrs89_8a1d9b02-1faf-4a01-82a1-d71e4c154f57/swift-ring-rebalance/0.log" Sep 30 13:25:12 crc kubenswrapper[5002]: I0930 13:25:12.762871 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_b7a32bf0-614c-479f-986e-3f954c27ad1f/account-auditor/0.log" Sep 30 13:25:13 crc kubenswrapper[5002]: I0930 13:25:13.028068 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_b7a32bf0-614c-479f-986e-3f954c27ad1f/account-reaper/0.log" Sep 30 13:25:13 crc kubenswrapper[5002]: I0930 13:25:13.128657 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_b7a32bf0-614c-479f-986e-3f954c27ad1f/account-server/0.log" Sep 30 13:25:13 crc kubenswrapper[5002]: I0930 13:25:13.141041 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_b7a32bf0-614c-479f-986e-3f954c27ad1f/account-replicator/0.log" Sep 30 13:25:13 crc kubenswrapper[5002]: I0930 13:25:13.250351 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_b7a32bf0-614c-479f-986e-3f954c27ad1f/container-auditor/0.log" Sep 30 13:25:13 crc kubenswrapper[5002]: I0930 13:25:13.378952 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_b7a32bf0-614c-479f-986e-3f954c27ad1f/container-server/0.log" Sep 30 13:25:13 crc kubenswrapper[5002]: I0930 13:25:13.383284 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_b7a32bf0-614c-479f-986e-3f954c27ad1f/container-replicator/0.log" Sep 30 13:25:13 crc kubenswrapper[5002]: I0930 13:25:13.431016 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_b7a32bf0-614c-479f-986e-3f954c27ad1f/container-updater/0.log" Sep 30 13:25:13 crc kubenswrapper[5002]: I0930 13:25:13.575911 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_b7a32bf0-614c-479f-986e-3f954c27ad1f/object-expirer/0.log" Sep 30 13:25:13 crc kubenswrapper[5002]: I0930 13:25:13.626368 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_b7a32bf0-614c-479f-986e-3f954c27ad1f/object-auditor/0.log" 
Sep 30 13:25:13 crc kubenswrapper[5002]: I0930 13:25:13.651834 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_b7a32bf0-614c-479f-986e-3f954c27ad1f/object-replicator/0.log" Sep 30 13:25:13 crc kubenswrapper[5002]: I0930 13:25:13.761020 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_b7a32bf0-614c-479f-986e-3f954c27ad1f/object-server/0.log" Sep 30 13:25:13 crc kubenswrapper[5002]: I0930 13:25:13.837670 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_b7a32bf0-614c-479f-986e-3f954c27ad1f/object-updater/0.log" Sep 30 13:25:13 crc kubenswrapper[5002]: I0930 13:25:13.867315 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_b7a32bf0-614c-479f-986e-3f954c27ad1f/rsync/0.log" Sep 30 13:25:14 crc kubenswrapper[5002]: I0930 13:25:14.026267 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_b7a32bf0-614c-479f-986e-3f954c27ad1f/swift-recon-cron/0.log" Sep 30 13:25:14 crc kubenswrapper[5002]: I0930 13:25:14.227220 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_telemetry-edpm-deployment-openstack-edpm-ipam-r748r_5614484d-58b4-42e2-94a5-dda83b89be64/telemetry-edpm-deployment-openstack-edpm-ipam/0.log" Sep 30 13:25:14 crc kubenswrapper[5002]: I0930 13:25:14.327710 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_tempest-tests-tempest_32bc4a2b-b531-4126-8920-ec50156dc863/tempest-tests-tempest-tests-runner/0.log" Sep 30 13:25:14 crc kubenswrapper[5002]: I0930 13:25:14.469094 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_test-operator-logs-pod-tempest-tempest-tests-tempest_50ed8d15-6cc0-4e0f-9116-35c93f45d6d4/test-operator-logs-container/0.log" Sep 30 13:25:14 crc kubenswrapper[5002]: I0930 13:25:14.685122 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_validate-network-edpm-deployment-openstack-edpm-ipam-64qkx_a049d73d-c168-40c1-a943-9df4f221879a/validate-network-edpm-deployment-openstack-edpm-ipam/0.log" Sep 30 13:25:27 crc kubenswrapper[5002]: I0930 13:25:27.143629 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_memcached-0_7864c645-ac32-48bb-a292-0ff4ec2a5955/memcached/0.log" Sep 30 13:25:57 crc kubenswrapper[5002]: I0930 13:25:57.212147 5002 generic.go:334] "Generic (PLEG): container finished" podID="4038ded3-a825-4803-951d-9e57ee43f00c" containerID="65f3b243c81108de2aff460e6f96d90af24fc09cc9f23c07a8b06261dceafb7d" exitCode=0 Sep 30 13:25:57 crc kubenswrapper[5002]: I0930 13:25:57.212253 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-smlkm/crc-debug-x5j2l" event={"ID":"4038ded3-a825-4803-951d-9e57ee43f00c","Type":"ContainerDied","Data":"65f3b243c81108de2aff460e6f96d90af24fc09cc9f23c07a8b06261dceafb7d"} Sep 30 13:25:58 crc kubenswrapper[5002]: I0930 13:25:58.352567 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-smlkm/crc-debug-x5j2l" Sep 30 13:25:58 crc kubenswrapper[5002]: I0930 13:25:58.384380 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-smlkm/crc-debug-x5j2l"] Sep 30 13:25:58 crc kubenswrapper[5002]: I0930 13:25:58.392520 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-smlkm/crc-debug-x5j2l"] Sep 30 13:25:58 crc kubenswrapper[5002]: I0930 13:25:58.460654 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w9gmt\" (UniqueName: \"kubernetes.io/projected/4038ded3-a825-4803-951d-9e57ee43f00c-kube-api-access-w9gmt\") pod \"4038ded3-a825-4803-951d-9e57ee43f00c\" (UID: \"4038ded3-a825-4803-951d-9e57ee43f00c\") " Sep 30 13:25:58 crc kubenswrapper[5002]: I0930 13:25:58.460947 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/4038ded3-a825-4803-951d-9e57ee43f00c-host\") pod \"4038ded3-a825-4803-951d-9e57ee43f00c\" (UID: \"4038ded3-a825-4803-951d-9e57ee43f00c\") " Sep 30 13:25:58 crc kubenswrapper[5002]: I0930 13:25:58.461010 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/4038ded3-a825-4803-951d-9e57ee43f00c-host" (OuterVolumeSpecName: "host") pod "4038ded3-a825-4803-951d-9e57ee43f00c" (UID: "4038ded3-a825-4803-951d-9e57ee43f00c"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 13:25:58 crc kubenswrapper[5002]: I0930 13:25:58.461597 5002 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/4038ded3-a825-4803-951d-9e57ee43f00c-host\") on node \"crc\" DevicePath \"\"" Sep 30 13:25:58 crc kubenswrapper[5002]: I0930 13:25:58.466070 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4038ded3-a825-4803-951d-9e57ee43f00c-kube-api-access-w9gmt" (OuterVolumeSpecName: "kube-api-access-w9gmt") pod "4038ded3-a825-4803-951d-9e57ee43f00c" (UID: "4038ded3-a825-4803-951d-9e57ee43f00c"). InnerVolumeSpecName "kube-api-access-w9gmt". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 13:25:58 crc kubenswrapper[5002]: I0930 13:25:58.563675 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w9gmt\" (UniqueName: \"kubernetes.io/projected/4038ded3-a825-4803-951d-9e57ee43f00c-kube-api-access-w9gmt\") on node \"crc\" DevicePath \"\"" Sep 30 13:25:58 crc kubenswrapper[5002]: I0930 13:25:58.691344 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4038ded3-a825-4803-951d-9e57ee43f00c" path="/var/lib/kubelet/pods/4038ded3-a825-4803-951d-9e57ee43f00c/volumes" Sep 30 13:25:59 crc kubenswrapper[5002]: I0930 13:25:59.236004 5002 scope.go:117] "RemoveContainer" containerID="65f3b243c81108de2aff460e6f96d90af24fc09cc9f23c07a8b06261dceafb7d" Sep 30 13:25:59 crc kubenswrapper[5002]: I0930 13:25:59.236120 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-smlkm/crc-debug-x5j2l" Sep 30 13:25:59 crc kubenswrapper[5002]: I0930 13:25:59.574125 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-smlkm/crc-debug-dg5tc"] Sep 30 13:25:59 crc kubenswrapper[5002]: E0930 13:25:59.574738 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4038ded3-a825-4803-951d-9e57ee43f00c" containerName="container-00" Sep 30 13:25:59 crc kubenswrapper[5002]: I0930 13:25:59.574760 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="4038ded3-a825-4803-951d-9e57ee43f00c" containerName="container-00" Sep 30 13:25:59 crc kubenswrapper[5002]: I0930 13:25:59.575078 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="4038ded3-a825-4803-951d-9e57ee43f00c" containerName="container-00" Sep 30 13:25:59 crc kubenswrapper[5002]: I0930 13:25:59.576067 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-smlkm/crc-debug-dg5tc" Sep 30 13:25:59 crc kubenswrapper[5002]: I0930 13:25:59.579296 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-smlkm"/"default-dockercfg-fk85c" Sep 30 13:25:59 crc kubenswrapper[5002]: I0930 13:25:59.686728 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/906c7f03-0606-4acc-9705-5cc8b8b569f0-host\") pod \"crc-debug-dg5tc\" (UID: \"906c7f03-0606-4acc-9705-5cc8b8b569f0\") " pod="openshift-must-gather-smlkm/crc-debug-dg5tc" Sep 30 13:25:59 crc kubenswrapper[5002]: I0930 13:25:59.686945 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-25dqx\" (UniqueName: \"kubernetes.io/projected/906c7f03-0606-4acc-9705-5cc8b8b569f0-kube-api-access-25dqx\") pod \"crc-debug-dg5tc\" (UID: \"906c7f03-0606-4acc-9705-5cc8b8b569f0\") " pod="openshift-must-gather-smlkm/crc-debug-dg5tc" Sep 30 13:25:59 crc kubenswrapper[5002]: I0930 13:25:59.788569 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-25dqx\" (UniqueName: \"kubernetes.io/projected/906c7f03-0606-4acc-9705-5cc8b8b569f0-kube-api-access-25dqx\") pod \"crc-debug-dg5tc\" (UID: \"906c7f03-0606-4acc-9705-5cc8b8b569f0\") " pod="openshift-must-gather-smlkm/crc-debug-dg5tc" Sep 30 13:25:59 crc kubenswrapper[5002]: I0930 13:25:59.788704 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/906c7f03-0606-4acc-9705-5cc8b8b569f0-host\") pod \"crc-debug-dg5tc\" (UID: \"906c7f03-0606-4acc-9705-5cc8b8b569f0\") " pod="openshift-must-gather-smlkm/crc-debug-dg5tc" Sep 30 13:25:59 crc kubenswrapper[5002]: I0930 13:25:59.788850 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/906c7f03-0606-4acc-9705-5cc8b8b569f0-host\") pod \"crc-debug-dg5tc\" (UID: \"906c7f03-0606-4acc-9705-5cc8b8b569f0\") " pod="openshift-must-gather-smlkm/crc-debug-dg5tc" Sep 30 13:25:59 crc kubenswrapper[5002]: I0930 13:25:59.814578 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-25dqx\" (UniqueName: \"kubernetes.io/projected/906c7f03-0606-4acc-9705-5cc8b8b569f0-kube-api-access-25dqx\") pod \"crc-debug-dg5tc\" (UID: \"906c7f03-0606-4acc-9705-5cc8b8b569f0\") " pod="openshift-must-gather-smlkm/crc-debug-dg5tc" Sep 30 13:25:59 crc kubenswrapper[5002]: I0930 
13:25:59.896962 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-smlkm/crc-debug-dg5tc" Sep 30 13:26:00 crc kubenswrapper[5002]: I0930 13:26:00.247671 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-smlkm/crc-debug-dg5tc" event={"ID":"906c7f03-0606-4acc-9705-5cc8b8b569f0","Type":"ContainerStarted","Data":"46ae58ad54f9fbdc4940a13dbf1efefe4ed4b35ad1b6a0b3fa49d67fb0bb1296"} Sep 30 13:26:00 crc kubenswrapper[5002]: I0930 13:26:00.247715 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-smlkm/crc-debug-dg5tc" event={"ID":"906c7f03-0606-4acc-9705-5cc8b8b569f0","Type":"ContainerStarted","Data":"4c507ecddcaebc7de9e1d51927c9c3bc2d4c4b7b3f02aa9da0232c8aafa06fb7"} Sep 30 13:26:00 crc kubenswrapper[5002]: I0930 13:26:00.262705 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-smlkm/crc-debug-dg5tc" podStartSLOduration=1.2626870700000001 podStartE2EDuration="1.26268707s" podCreationTimestamp="2025-09-30 13:25:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 13:26:00.262226347 +0000 UTC m=+3934.511908493" watchObservedRunningTime="2025-09-30 13:26:00.26268707 +0000 UTC m=+3934.512369206" Sep 30 13:26:01 crc kubenswrapper[5002]: I0930 13:26:01.259512 5002 generic.go:334] "Generic (PLEG): container finished" podID="906c7f03-0606-4acc-9705-5cc8b8b569f0" containerID="46ae58ad54f9fbdc4940a13dbf1efefe4ed4b35ad1b6a0b3fa49d67fb0bb1296" exitCode=0 Sep 30 13:26:01 crc kubenswrapper[5002]: I0930 13:26:01.259666 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-smlkm/crc-debug-dg5tc" event={"ID":"906c7f03-0606-4acc-9705-5cc8b8b569f0","Type":"ContainerDied","Data":"46ae58ad54f9fbdc4940a13dbf1efefe4ed4b35ad1b6a0b3fa49d67fb0bb1296"} Sep 30 13:26:02 crc kubenswrapper[5002]: I0930 13:26:02.363528 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-smlkm/crc-debug-dg5tc" Sep 30 13:26:02 crc kubenswrapper[5002]: I0930 13:26:02.525764 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/906c7f03-0606-4acc-9705-5cc8b8b569f0-host\") pod \"906c7f03-0606-4acc-9705-5cc8b8b569f0\" (UID: \"906c7f03-0606-4acc-9705-5cc8b8b569f0\") " Sep 30 13:26:02 crc kubenswrapper[5002]: I0930 13:26:02.525890 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/906c7f03-0606-4acc-9705-5cc8b8b569f0-host" (OuterVolumeSpecName: "host") pod "906c7f03-0606-4acc-9705-5cc8b8b569f0" (UID: "906c7f03-0606-4acc-9705-5cc8b8b569f0"). InnerVolumeSpecName "host". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 13:26:02 crc kubenswrapper[5002]: I0930 13:26:02.525960 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-25dqx\" (UniqueName: \"kubernetes.io/projected/906c7f03-0606-4acc-9705-5cc8b8b569f0-kube-api-access-25dqx\") pod \"906c7f03-0606-4acc-9705-5cc8b8b569f0\" (UID: \"906c7f03-0606-4acc-9705-5cc8b8b569f0\") " Sep 30 13:26:02 crc kubenswrapper[5002]: I0930 13:26:02.526389 5002 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/906c7f03-0606-4acc-9705-5cc8b8b569f0-host\") on node \"crc\" DevicePath \"\"" Sep 30 13:26:02 crc kubenswrapper[5002]: I0930 13:26:02.535585 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/906c7f03-0606-4acc-9705-5cc8b8b569f0-kube-api-access-25dqx" (OuterVolumeSpecName: "kube-api-access-25dqx") pod "906c7f03-0606-4acc-9705-5cc8b8b569f0" (UID: "906c7f03-0606-4acc-9705-5cc8b8b569f0"). InnerVolumeSpecName "kube-api-access-25dqx". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 13:26:02 crc kubenswrapper[5002]: I0930 13:26:02.627577 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-25dqx\" (UniqueName: \"kubernetes.io/projected/906c7f03-0606-4acc-9705-5cc8b8b569f0-kube-api-access-25dqx\") on node \"crc\" DevicePath \"\"" Sep 30 13:26:03 crc kubenswrapper[5002]: I0930 13:26:03.277983 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-smlkm/crc-debug-dg5tc" event={"ID":"906c7f03-0606-4acc-9705-5cc8b8b569f0","Type":"ContainerDied","Data":"4c507ecddcaebc7de9e1d51927c9c3bc2d4c4b7b3f02aa9da0232c8aafa06fb7"} Sep 30 13:26:03 crc kubenswrapper[5002]: I0930 13:26:03.278028 5002 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4c507ecddcaebc7de9e1d51927c9c3bc2d4c4b7b3f02aa9da0232c8aafa06fb7" Sep 30 13:26:03 crc kubenswrapper[5002]: I0930 13:26:03.278086 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-smlkm/crc-debug-dg5tc" Sep 30 13:26:06 crc kubenswrapper[5002]: I0930 13:26:06.961915 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-smlkm/crc-debug-dg5tc"] Sep 30 13:26:06 crc kubenswrapper[5002]: I0930 13:26:06.970572 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-smlkm/crc-debug-dg5tc"] Sep 30 13:26:08 crc kubenswrapper[5002]: I0930 13:26:08.135272 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-smlkm/crc-debug-mf7zj"] Sep 30 13:26:08 crc kubenswrapper[5002]: E0930 13:26:08.136036 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="906c7f03-0606-4acc-9705-5cc8b8b569f0" containerName="container-00" Sep 30 13:26:08 crc kubenswrapper[5002]: I0930 13:26:08.136052 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="906c7f03-0606-4acc-9705-5cc8b8b569f0" containerName="container-00" Sep 30 13:26:08 crc kubenswrapper[5002]: I0930 13:26:08.137092 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="906c7f03-0606-4acc-9705-5cc8b8b569f0" containerName="container-00" Sep 30 13:26:08 crc kubenswrapper[5002]: I0930 13:26:08.138870 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-smlkm/crc-debug-mf7zj" Sep 30 13:26:08 crc kubenswrapper[5002]: I0930 13:26:08.141332 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-smlkm"/"default-dockercfg-fk85c" Sep 30 13:26:08 crc kubenswrapper[5002]: I0930 13:26:08.221208 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/fac27c03-7314-47bf-bc92-f750d6a303bc-host\") pod \"crc-debug-mf7zj\" (UID: \"fac27c03-7314-47bf-bc92-f750d6a303bc\") " pod="openshift-must-gather-smlkm/crc-debug-mf7zj" Sep 30 13:26:08 crc kubenswrapper[5002]: I0930 13:26:08.221320 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bllk9\" (UniqueName: \"kubernetes.io/projected/fac27c03-7314-47bf-bc92-f750d6a303bc-kube-api-access-bllk9\") pod \"crc-debug-mf7zj\" (UID: \"fac27c03-7314-47bf-bc92-f750d6a303bc\") " pod="openshift-must-gather-smlkm/crc-debug-mf7zj" Sep 30 13:26:08 crc kubenswrapper[5002]: I0930 13:26:08.323105 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/fac27c03-7314-47bf-bc92-f750d6a303bc-host\") pod \"crc-debug-mf7zj\" (UID: \"fac27c03-7314-47bf-bc92-f750d6a303bc\") " pod="openshift-must-gather-smlkm/crc-debug-mf7zj" Sep 30 13:26:08 crc kubenswrapper[5002]: I0930 13:26:08.323186 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bllk9\" (UniqueName: \"kubernetes.io/projected/fac27c03-7314-47bf-bc92-f750d6a303bc-kube-api-access-bllk9\") pod \"crc-debug-mf7zj\" (UID: \"fac27c03-7314-47bf-bc92-f750d6a303bc\") " pod="openshift-must-gather-smlkm/crc-debug-mf7zj" Sep 30 13:26:08 crc kubenswrapper[5002]: I0930 13:26:08.323259 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/fac27c03-7314-47bf-bc92-f750d6a303bc-host\") pod \"crc-debug-mf7zj\" (UID: \"fac27c03-7314-47bf-bc92-f750d6a303bc\") " pod="openshift-must-gather-smlkm/crc-debug-mf7zj" Sep 30 13:26:08 crc kubenswrapper[5002]: I0930 13:26:08.341682 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bllk9\" (UniqueName: \"kubernetes.io/projected/fac27c03-7314-47bf-bc92-f750d6a303bc-kube-api-access-bllk9\") pod \"crc-debug-mf7zj\" (UID: \"fac27c03-7314-47bf-bc92-f750d6a303bc\") " pod="openshift-must-gather-smlkm/crc-debug-mf7zj" Sep 30 13:26:08 crc kubenswrapper[5002]: I0930 13:26:08.458021 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-smlkm/crc-debug-mf7zj" Sep 30 13:26:08 crc kubenswrapper[5002]: I0930 13:26:08.690818 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="906c7f03-0606-4acc-9705-5cc8b8b569f0" path="/var/lib/kubelet/pods/906c7f03-0606-4acc-9705-5cc8b8b569f0/volumes" Sep 30 13:26:09 crc kubenswrapper[5002]: I0930 13:26:09.335807 5002 generic.go:334] "Generic (PLEG): container finished" podID="fac27c03-7314-47bf-bc92-f750d6a303bc" containerID="b60b08c5a0afcc2c7fdc2a4f1100ebc1d679acf955db9259ccabc506ba4ff000" exitCode=0 Sep 30 13:26:09 crc kubenswrapper[5002]: I0930 13:26:09.335917 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-smlkm/crc-debug-mf7zj" event={"ID":"fac27c03-7314-47bf-bc92-f750d6a303bc","Type":"ContainerDied","Data":"b60b08c5a0afcc2c7fdc2a4f1100ebc1d679acf955db9259ccabc506ba4ff000"} Sep 30 13:26:09 crc kubenswrapper[5002]: I0930 13:26:09.336153 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-smlkm/crc-debug-mf7zj" event={"ID":"fac27c03-7314-47bf-bc92-f750d6a303bc","Type":"ContainerStarted","Data":"5b1b1bed3de3b68c625e2ea05ca543f9a5b28643c1524da4386079afd652c93d"} Sep 30 13:26:09 crc kubenswrapper[5002]: I0930 13:26:09.383362 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-smlkm/crc-debug-mf7zj"] Sep 30 13:26:09 crc kubenswrapper[5002]: I0930 13:26:09.394188 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-smlkm/crc-debug-mf7zj"] Sep 30 13:26:10 crc kubenswrapper[5002]: I0930 13:26:10.437953 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-smlkm/crc-debug-mf7zj" Sep 30 13:26:10 crc kubenswrapper[5002]: I0930 13:26:10.568384 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bllk9\" (UniqueName: \"kubernetes.io/projected/fac27c03-7314-47bf-bc92-f750d6a303bc-kube-api-access-bllk9\") pod \"fac27c03-7314-47bf-bc92-f750d6a303bc\" (UID: \"fac27c03-7314-47bf-bc92-f750d6a303bc\") " Sep 30 13:26:10 crc kubenswrapper[5002]: I0930 13:26:10.568675 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/fac27c03-7314-47bf-bc92-f750d6a303bc-host\") pod \"fac27c03-7314-47bf-bc92-f750d6a303bc\" (UID: \"fac27c03-7314-47bf-bc92-f750d6a303bc\") " Sep 30 13:26:10 crc kubenswrapper[5002]: I0930 13:26:10.568834 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/fac27c03-7314-47bf-bc92-f750d6a303bc-host" (OuterVolumeSpecName: "host") pod "fac27c03-7314-47bf-bc92-f750d6a303bc" (UID: "fac27c03-7314-47bf-bc92-f750d6a303bc"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 13:26:10 crc kubenswrapper[5002]: I0930 13:26:10.569218 5002 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/fac27c03-7314-47bf-bc92-f750d6a303bc-host\") on node \"crc\" DevicePath \"\"" Sep 30 13:26:10 crc kubenswrapper[5002]: I0930 13:26:10.578680 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fac27c03-7314-47bf-bc92-f750d6a303bc-kube-api-access-bllk9" (OuterVolumeSpecName: "kube-api-access-bllk9") pod "fac27c03-7314-47bf-bc92-f750d6a303bc" (UID: "fac27c03-7314-47bf-bc92-f750d6a303bc"). InnerVolumeSpecName "kube-api-access-bllk9". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 13:26:10 crc kubenswrapper[5002]: I0930 13:26:10.671175 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bllk9\" (UniqueName: \"kubernetes.io/projected/fac27c03-7314-47bf-bc92-f750d6a303bc-kube-api-access-bllk9\") on node \"crc\" DevicePath \"\"" Sep 30 13:26:10 crc kubenswrapper[5002]: I0930 13:26:10.685642 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fac27c03-7314-47bf-bc92-f750d6a303bc" path="/var/lib/kubelet/pods/fac27c03-7314-47bf-bc92-f750d6a303bc/volumes" Sep 30 13:26:11 crc kubenswrapper[5002]: I0930 13:26:11.072670 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-6ff8b75857-mf4jp_636ed6c4-281d-4ea2-be99-a04e07b08170/kube-rbac-proxy/0.log" Sep 30 13:26:11 crc kubenswrapper[5002]: I0930 13:26:11.138262 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-6ff8b75857-mf4jp_636ed6c4-281d-4ea2-be99-a04e07b08170/manager/0.log" Sep 30 13:26:11 crc kubenswrapper[5002]: I0930 13:26:11.277152 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-644bddb6d8-ph95z_8eefd962-2b74-4b77-8bc9-338b8ccfd0cf/kube-rbac-proxy/0.log" Sep 30 13:26:11 crc kubenswrapper[5002]: I0930 13:26:11.323648 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-644bddb6d8-ph95z_8eefd962-2b74-4b77-8bc9-338b8ccfd0cf/manager/0.log" Sep 30 13:26:11 crc kubenswrapper[5002]: I0930 13:26:11.353605 5002 scope.go:117] "RemoveContainer" containerID="b60b08c5a0afcc2c7fdc2a4f1100ebc1d679acf955db9259ccabc506ba4ff000" Sep 30 13:26:11 crc kubenswrapper[5002]: I0930 13:26:11.353627 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-smlkm/crc-debug-mf7zj" Sep 30 13:26:11 crc kubenswrapper[5002]: I0930 13:26:11.411017 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-84f4f7b77b-rvv9g_9caa184d-b4ff-4419-8f8d-ede2b0b6845e/kube-rbac-proxy/0.log" Sep 30 13:26:11 crc kubenswrapper[5002]: I0930 13:26:11.486105 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-84f4f7b77b-rvv9g_9caa184d-b4ff-4419-8f8d-ede2b0b6845e/manager/0.log" Sep 30 13:26:11 crc kubenswrapper[5002]: I0930 13:26:11.560742 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_fcb715ece9f005e1a03c130eeea8b9b209953c0686aefe28df3e5ad2fftkcgf_bd5d2600-fe6f-407b-8110-97516c3117cb/util/0.log" Sep 30 13:26:11 crc kubenswrapper[5002]: I0930 13:26:11.721495 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_fcb715ece9f005e1a03c130eeea8b9b209953c0686aefe28df3e5ad2fftkcgf_bd5d2600-fe6f-407b-8110-97516c3117cb/util/0.log" Sep 30 13:26:11 crc kubenswrapper[5002]: I0930 13:26:11.743527 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_fcb715ece9f005e1a03c130eeea8b9b209953c0686aefe28df3e5ad2fftkcgf_bd5d2600-fe6f-407b-8110-97516c3117cb/pull/0.log" Sep 30 13:26:11 crc kubenswrapper[5002]: I0930 13:26:11.761132 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_fcb715ece9f005e1a03c130eeea8b9b209953c0686aefe28df3e5ad2fftkcgf_bd5d2600-fe6f-407b-8110-97516c3117cb/pull/0.log" Sep 30 13:26:11 crc kubenswrapper[5002]: I0930 13:26:11.869916 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_fcb715ece9f005e1a03c130eeea8b9b209953c0686aefe28df3e5ad2fftkcgf_bd5d2600-fe6f-407b-8110-97516c3117cb/util/0.log" Sep 30 13:26:11 crc kubenswrapper[5002]: I0930 13:26:11.922603 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_fcb715ece9f005e1a03c130eeea8b9b209953c0686aefe28df3e5ad2fftkcgf_bd5d2600-fe6f-407b-8110-97516c3117cb/pull/0.log" Sep 30 13:26:11 crc kubenswrapper[5002]: I0930 13:26:11.944801 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_fcb715ece9f005e1a03c130eeea8b9b209953c0686aefe28df3e5ad2fftkcgf_bd5d2600-fe6f-407b-8110-97516c3117cb/extract/0.log" Sep 30 13:26:12 crc kubenswrapper[5002]: I0930 13:26:12.068688 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-84958c4d49-chbd7_d44fe72c-afa7-4442-b308-0b111e16c7b8/kube-rbac-proxy/0.log" Sep 30 13:26:12 crc kubenswrapper[5002]: I0930 13:26:12.134800 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-84958c4d49-chbd7_d44fe72c-afa7-4442-b308-0b111e16c7b8/manager/0.log" Sep 30 13:26:12 crc kubenswrapper[5002]: I0930 13:26:12.204382 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-5d889d78cf-sjnlc_63ee8874-1cbb-4183-b16a-f2efd8a1e7d6/kube-rbac-proxy/0.log" Sep 30 13:26:12 crc kubenswrapper[5002]: I0930 13:26:12.230756 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-5d889d78cf-sjnlc_63ee8874-1cbb-4183-b16a-f2efd8a1e7d6/manager/0.log" Sep 30 13:26:12 crc kubenswrapper[5002]: I0930 13:26:12.359258 5002 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-9f4696d94-42j7m_aa7bbe9e-668f-4ee3-976d-8e9ddbdbb092/kube-rbac-proxy/0.log" Sep 30 13:26:12 crc kubenswrapper[5002]: I0930 13:26:12.384080 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-9f4696d94-42j7m_aa7bbe9e-668f-4ee3-976d-8e9ddbdbb092/manager/0.log" Sep 30 13:26:12 crc kubenswrapper[5002]: I0930 13:26:12.540619 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-7d857cc749-qcq76_7c75d96a-d0a7-4f12-9799-4d01ee215248/kube-rbac-proxy/0.log" Sep 30 13:26:12 crc kubenswrapper[5002]: I0930 13:26:12.672218 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-7975b88857-r7t4w_720e0316-9060-4bd3-804c-f98017a3fb84/kube-rbac-proxy/0.log" Sep 30 13:26:12 crc kubenswrapper[5002]: I0930 13:26:12.712176 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-7d857cc749-qcq76_7c75d96a-d0a7-4f12-9799-4d01ee215248/manager/0.log" Sep 30 13:26:12 crc kubenswrapper[5002]: I0930 13:26:12.750347 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-7975b88857-r7t4w_720e0316-9060-4bd3-804c-f98017a3fb84/manager/0.log" Sep 30 13:26:12 crc kubenswrapper[5002]: I0930 13:26:12.854170 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-5bd55b4bff-m7m4h_9692c8b9-1e89-47e3-972c-1af7eb8a2ebe/kube-rbac-proxy/0.log" Sep 30 13:26:12 crc kubenswrapper[5002]: I0930 13:26:12.946454 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-5bd55b4bff-m7m4h_9692c8b9-1e89-47e3-972c-1af7eb8a2ebe/manager/0.log" Sep 30 13:26:13 crc kubenswrapper[5002]: I0930 13:26:13.046689 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-6d68dbc695-zzt9s_855868b4-991b-4f9f-b471-5b1244221192/manager/0.log" Sep 30 13:26:13 crc kubenswrapper[5002]: I0930 13:26:13.075171 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-6d68dbc695-zzt9s_855868b4-991b-4f9f-b471-5b1244221192/kube-rbac-proxy/0.log" Sep 30 13:26:13 crc kubenswrapper[5002]: I0930 13:26:13.138582 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-88c7-sxv7p_aa376f72-5b9a-4087-8ea6-a5cf80be315b/kube-rbac-proxy/0.log" Sep 30 13:26:13 crc kubenswrapper[5002]: I0930 13:26:13.241615 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-88c7-sxv7p_aa376f72-5b9a-4087-8ea6-a5cf80be315b/manager/0.log" Sep 30 13:26:13 crc kubenswrapper[5002]: I0930 13:26:13.321779 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-64d7b59854-hcr66_6413b1d0-7f0f-4bca-88e9-90a9d78bff9c/kube-rbac-proxy/0.log" Sep 30 13:26:13 crc kubenswrapper[5002]: I0930 13:26:13.370048 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-64d7b59854-hcr66_6413b1d0-7f0f-4bca-88e9-90a9d78bff9c/manager/0.log" Sep 30 13:26:13 crc kubenswrapper[5002]: I0930 13:26:13.476696 5002 log.go:25] "Finished parsing 
log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-c7c776c96-gh66v_2d9e02f5-4644-423a-a783-8dbc51d68570/kube-rbac-proxy/0.log" Sep 30 13:26:13 crc kubenswrapper[5002]: I0930 13:26:13.571248 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-c7c776c96-gh66v_2d9e02f5-4644-423a-a783-8dbc51d68570/manager/0.log" Sep 30 13:26:13 crc kubenswrapper[5002]: I0930 13:26:13.635677 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-76fcc6dc7c-xwlmz_38eab40a-26bf-4c1b-8911-4d6672629e3e/kube-rbac-proxy/0.log" Sep 30 13:26:13 crc kubenswrapper[5002]: I0930 13:26:13.665133 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-76fcc6dc7c-xwlmz_38eab40a-26bf-4c1b-8911-4d6672629e3e/manager/0.log" Sep 30 13:26:13 crc kubenswrapper[5002]: I0930 13:26:13.795008 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-6d776955-c7j6g_5d79f7da-8bfd-4f26-bcf1-b4ad36a6b42f/kube-rbac-proxy/0.log" Sep 30 13:26:13 crc kubenswrapper[5002]: I0930 13:26:13.818030 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-6d776955-c7j6g_5d79f7da-8bfd-4f26-bcf1-b4ad36a6b42f/manager/0.log" Sep 30 13:26:13 crc kubenswrapper[5002]: I0930 13:26:13.940843 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-84697dfb4d-w86tm_32ebad0d-e677-4e33-b555-44db1541becc/kube-rbac-proxy/0.log" Sep 30 13:26:14 crc kubenswrapper[5002]: I0930 13:26:14.379436 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-operator-b45798bf6-lpg2f_362e8e35-0b7c-4c4f-9db2-9c3d56a76d2f/kube-rbac-proxy/0.log" Sep 30 13:26:14 crc kubenswrapper[5002]: I0930 13:26:14.454853 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-operator-b45798bf6-lpg2f_362e8e35-0b7c-4c4f-9db2-9c3d56a76d2f/operator/0.log" Sep 30 13:26:14 crc kubenswrapper[5002]: I0930 13:26:14.501687 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-index-gc6rt_17eab1ee-1e5f-4092-80e0-77d8a4ca4016/registry-server/0.log" Sep 30 13:26:14 crc kubenswrapper[5002]: I0930 13:26:14.622155 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-9976ff44c-kl4qt_2607efd2-d90d-4a1e-be6c-5f3c88da67e4/kube-rbac-proxy/0.log" Sep 30 13:26:14 crc kubenswrapper[5002]: I0930 13:26:14.816578 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-9976ff44c-kl4qt_2607efd2-d90d-4a1e-be6c-5f3c88da67e4/manager/0.log" Sep 30 13:26:14 crc kubenswrapper[5002]: I0930 13:26:14.870744 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-589c58c6c-57btn_0d7297eb-3633-4083-9d4b-3bf8487360ca/manager/0.log" Sep 30 13:26:14 crc kubenswrapper[5002]: I0930 13:26:14.912288 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-589c58c6c-57btn_0d7297eb-3633-4083-9d4b-3bf8487360ca/kube-rbac-proxy/0.log" Sep 30 13:26:15 crc kubenswrapper[5002]: I0930 13:26:15.051841 5002 
log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-84697dfb4d-w86tm_32ebad0d-e677-4e33-b555-44db1541becc/manager/0.log" Sep 30 13:26:15 crc kubenswrapper[5002]: I0930 13:26:15.055848 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_rabbitmq-cluster-operator-manager-79d8469568-h9dzh_b4044bb7-6e63-45d0-9640-7966bcd23aa9/operator/0.log" Sep 30 13:26:15 crc kubenswrapper[5002]: I0930 13:26:15.126693 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-bc7dc7bd9-d9jtf_90cc681e-c24d-4b64-862d-3514308e77c6/kube-rbac-proxy/0.log" Sep 30 13:26:15 crc kubenswrapper[5002]: I0930 13:26:15.218098 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-b8d54b5d7-7662l_78f98b71-27e2-411c-b610-8b4be1068d5a/kube-rbac-proxy/0.log" Sep 30 13:26:15 crc kubenswrapper[5002]: I0930 13:26:15.261890 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-bc7dc7bd9-d9jtf_90cc681e-c24d-4b64-862d-3514308e77c6/manager/0.log" Sep 30 13:26:15 crc kubenswrapper[5002]: I0930 13:26:15.299321 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-b8d54b5d7-7662l_78f98b71-27e2-411c-b610-8b4be1068d5a/manager/0.log" Sep 30 13:26:15 crc kubenswrapper[5002]: I0930 13:26:15.396000 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-f66b554c6-z2dcj_0fa1c573-cbf9-43f1-8106-7cf73e93f1f3/kube-rbac-proxy/0.log" Sep 30 13:26:15 crc kubenswrapper[5002]: I0930 13:26:15.425106 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-f66b554c6-z2dcj_0fa1c573-cbf9-43f1-8106-7cf73e93f1f3/manager/0.log" Sep 30 13:26:15 crc kubenswrapper[5002]: I0930 13:26:15.494615 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-76669f99c-n44h7_e5254af8-649d-413f-b146-51c982f48073/kube-rbac-proxy/0.log" Sep 30 13:26:15 crc kubenswrapper[5002]: I0930 13:26:15.557840 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-76669f99c-n44h7_e5254af8-649d-413f-b146-51c982f48073/manager/0.log" Sep 30 13:26:17 crc kubenswrapper[5002]: I0930 13:26:17.776543 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-qt59t"] Sep 30 13:26:17 crc kubenswrapper[5002]: E0930 13:26:17.777333 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fac27c03-7314-47bf-bc92-f750d6a303bc" containerName="container-00" Sep 30 13:26:17 crc kubenswrapper[5002]: I0930 13:26:17.777345 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="fac27c03-7314-47bf-bc92-f750d6a303bc" containerName="container-00" Sep 30 13:26:17 crc kubenswrapper[5002]: I0930 13:26:17.777545 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="fac27c03-7314-47bf-bc92-f750d6a303bc" containerName="container-00" Sep 30 13:26:17 crc kubenswrapper[5002]: I0930 13:26:17.778777 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-qt59t" Sep 30 13:26:17 crc kubenswrapper[5002]: I0930 13:26:17.786915 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-qt59t"] Sep 30 13:26:17 crc kubenswrapper[5002]: I0930 13:26:17.907477 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0b06b055-b497-43f7-8797-99aea73dd071-utilities\") pod \"redhat-operators-qt59t\" (UID: \"0b06b055-b497-43f7-8797-99aea73dd071\") " pod="openshift-marketplace/redhat-operators-qt59t" Sep 30 13:26:17 crc kubenswrapper[5002]: I0930 13:26:17.907553 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0b06b055-b497-43f7-8797-99aea73dd071-catalog-content\") pod \"redhat-operators-qt59t\" (UID: \"0b06b055-b497-43f7-8797-99aea73dd071\") " pod="openshift-marketplace/redhat-operators-qt59t" Sep 30 13:26:17 crc kubenswrapper[5002]: I0930 13:26:17.907658 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gzbg7\" (UniqueName: \"kubernetes.io/projected/0b06b055-b497-43f7-8797-99aea73dd071-kube-api-access-gzbg7\") pod \"redhat-operators-qt59t\" (UID: \"0b06b055-b497-43f7-8797-99aea73dd071\") " pod="openshift-marketplace/redhat-operators-qt59t" Sep 30 13:26:18 crc kubenswrapper[5002]: I0930 13:26:18.008958 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gzbg7\" (UniqueName: \"kubernetes.io/projected/0b06b055-b497-43f7-8797-99aea73dd071-kube-api-access-gzbg7\") pod \"redhat-operators-qt59t\" (UID: \"0b06b055-b497-43f7-8797-99aea73dd071\") " pod="openshift-marketplace/redhat-operators-qt59t" Sep 30 13:26:18 crc kubenswrapper[5002]: I0930 13:26:18.009390 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0b06b055-b497-43f7-8797-99aea73dd071-utilities\") pod \"redhat-operators-qt59t\" (UID: \"0b06b055-b497-43f7-8797-99aea73dd071\") " pod="openshift-marketplace/redhat-operators-qt59t" Sep 30 13:26:18 crc kubenswrapper[5002]: I0930 13:26:18.009427 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0b06b055-b497-43f7-8797-99aea73dd071-catalog-content\") pod \"redhat-operators-qt59t\" (UID: \"0b06b055-b497-43f7-8797-99aea73dd071\") " pod="openshift-marketplace/redhat-operators-qt59t" Sep 30 13:26:18 crc kubenswrapper[5002]: I0930 13:26:18.009877 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0b06b055-b497-43f7-8797-99aea73dd071-catalog-content\") pod \"redhat-operators-qt59t\" (UID: \"0b06b055-b497-43f7-8797-99aea73dd071\") " pod="openshift-marketplace/redhat-operators-qt59t" Sep 30 13:26:18 crc kubenswrapper[5002]: I0930 13:26:18.009967 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0b06b055-b497-43f7-8797-99aea73dd071-utilities\") pod \"redhat-operators-qt59t\" (UID: \"0b06b055-b497-43f7-8797-99aea73dd071\") " pod="openshift-marketplace/redhat-operators-qt59t" Sep 30 13:26:18 crc kubenswrapper[5002]: I0930 13:26:18.030349 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-gzbg7\" (UniqueName: \"kubernetes.io/projected/0b06b055-b497-43f7-8797-99aea73dd071-kube-api-access-gzbg7\") pod \"redhat-operators-qt59t\" (UID: \"0b06b055-b497-43f7-8797-99aea73dd071\") " pod="openshift-marketplace/redhat-operators-qt59t" Sep 30 13:26:18 crc kubenswrapper[5002]: I0930 13:26:18.106880 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-qt59t" Sep 30 13:26:18 crc kubenswrapper[5002]: I0930 13:26:18.572504 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-qt59t"] Sep 30 13:26:18 crc kubenswrapper[5002]: W0930 13:26:18.585790 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0b06b055_b497_43f7_8797_99aea73dd071.slice/crio-fd2e454f5e93fd38fab4f903b859699ea2be833146307ee6d29ec69e8dcc2810 WatchSource:0}: Error finding container fd2e454f5e93fd38fab4f903b859699ea2be833146307ee6d29ec69e8dcc2810: Status 404 returned error can't find the container with id fd2e454f5e93fd38fab4f903b859699ea2be833146307ee6d29ec69e8dcc2810 Sep 30 13:26:19 crc kubenswrapper[5002]: I0930 13:26:19.420848 5002 generic.go:334] "Generic (PLEG): container finished" podID="0b06b055-b497-43f7-8797-99aea73dd071" containerID="726dd397be1c8ba662b0a9d915399771628c76107994f29d375b025c8133e50b" exitCode=0 Sep 30 13:26:19 crc kubenswrapper[5002]: I0930 13:26:19.420947 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-qt59t" event={"ID":"0b06b055-b497-43f7-8797-99aea73dd071","Type":"ContainerDied","Data":"726dd397be1c8ba662b0a9d915399771628c76107994f29d375b025c8133e50b"} Sep 30 13:26:19 crc kubenswrapper[5002]: I0930 13:26:19.421255 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-qt59t" event={"ID":"0b06b055-b497-43f7-8797-99aea73dd071","Type":"ContainerStarted","Data":"fd2e454f5e93fd38fab4f903b859699ea2be833146307ee6d29ec69e8dcc2810"} Sep 30 13:26:19 crc kubenswrapper[5002]: I0930 13:26:19.422908 5002 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Sep 30 13:26:21 crc kubenswrapper[5002]: I0930 13:26:21.439069 5002 generic.go:334] "Generic (PLEG): container finished" podID="0b06b055-b497-43f7-8797-99aea73dd071" containerID="10dd1e8dfc0cb7ecd476d332524171d649e1fa21dc6c457c6b9175cf3bb9687c" exitCode=0 Sep 30 13:26:21 crc kubenswrapper[5002]: I0930 13:26:21.439229 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-qt59t" event={"ID":"0b06b055-b497-43f7-8797-99aea73dd071","Type":"ContainerDied","Data":"10dd1e8dfc0cb7ecd476d332524171d649e1fa21dc6c457c6b9175cf3bb9687c"} Sep 30 13:26:22 crc kubenswrapper[5002]: I0930 13:26:22.451198 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-qt59t" event={"ID":"0b06b055-b497-43f7-8797-99aea73dd071","Type":"ContainerStarted","Data":"fdea144376457bca5b26c275136b3f4a08f85e66533ef7a2efcc82bcb35824e7"} Sep 30 13:26:22 crc kubenswrapper[5002]: I0930 13:26:22.490934 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-qt59t" podStartSLOduration=2.844105978 podStartE2EDuration="5.490909758s" podCreationTimestamp="2025-09-30 13:26:17 +0000 UTC" firstStartedPulling="2025-09-30 13:26:19.422666501 +0000 UTC m=+3953.672348647" lastFinishedPulling="2025-09-30 13:26:22.069470281 
+0000 UTC m=+3956.319152427" observedRunningTime="2025-09-30 13:26:22.481241243 +0000 UTC m=+3956.730923409" watchObservedRunningTime="2025-09-30 13:26:22.490909758 +0000 UTC m=+3956.740591904" Sep 30 13:26:28 crc kubenswrapper[5002]: I0930 13:26:28.108135 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-qt59t" Sep 30 13:26:28 crc kubenswrapper[5002]: I0930 13:26:28.108747 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-qt59t" Sep 30 13:26:28 crc kubenswrapper[5002]: I0930 13:26:28.152177 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-qt59t" Sep 30 13:26:28 crc kubenswrapper[5002]: I0930 13:26:28.563630 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-qt59t" Sep 30 13:26:28 crc kubenswrapper[5002]: I0930 13:26:28.609629 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-qt59t"] Sep 30 13:26:30 crc kubenswrapper[5002]: I0930 13:26:30.527077 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-qt59t" podUID="0b06b055-b497-43f7-8797-99aea73dd071" containerName="registry-server" containerID="cri-o://fdea144376457bca5b26c275136b3f4a08f85e66533ef7a2efcc82bcb35824e7" gracePeriod=2 Sep 30 13:26:31 crc kubenswrapper[5002]: I0930 13:26:31.000763 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-qt59t" Sep 30 13:26:31 crc kubenswrapper[5002]: I0930 13:26:31.151280 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0b06b055-b497-43f7-8797-99aea73dd071-catalog-content\") pod \"0b06b055-b497-43f7-8797-99aea73dd071\" (UID: \"0b06b055-b497-43f7-8797-99aea73dd071\") " Sep 30 13:26:31 crc kubenswrapper[5002]: I0930 13:26:31.151546 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gzbg7\" (UniqueName: \"kubernetes.io/projected/0b06b055-b497-43f7-8797-99aea73dd071-kube-api-access-gzbg7\") pod \"0b06b055-b497-43f7-8797-99aea73dd071\" (UID: \"0b06b055-b497-43f7-8797-99aea73dd071\") " Sep 30 13:26:31 crc kubenswrapper[5002]: I0930 13:26:31.151670 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0b06b055-b497-43f7-8797-99aea73dd071-utilities\") pod \"0b06b055-b497-43f7-8797-99aea73dd071\" (UID: \"0b06b055-b497-43f7-8797-99aea73dd071\") " Sep 30 13:26:31 crc kubenswrapper[5002]: I0930 13:26:31.152386 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0b06b055-b497-43f7-8797-99aea73dd071-utilities" (OuterVolumeSpecName: "utilities") pod "0b06b055-b497-43f7-8797-99aea73dd071" (UID: "0b06b055-b497-43f7-8797-99aea73dd071"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 13:26:31 crc kubenswrapper[5002]: I0930 13:26:31.157231 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b06b055-b497-43f7-8797-99aea73dd071-kube-api-access-gzbg7" (OuterVolumeSpecName: "kube-api-access-gzbg7") pod "0b06b055-b497-43f7-8797-99aea73dd071" (UID: "0b06b055-b497-43f7-8797-99aea73dd071"). 
InnerVolumeSpecName "kube-api-access-gzbg7". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 13:26:31 crc kubenswrapper[5002]: I0930 13:26:31.253789 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gzbg7\" (UniqueName: \"kubernetes.io/projected/0b06b055-b497-43f7-8797-99aea73dd071-kube-api-access-gzbg7\") on node \"crc\" DevicePath \"\"" Sep 30 13:26:31 crc kubenswrapper[5002]: I0930 13:26:31.253832 5002 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0b06b055-b497-43f7-8797-99aea73dd071-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 13:26:31 crc kubenswrapper[5002]: I0930 13:26:31.539407 5002 generic.go:334] "Generic (PLEG): container finished" podID="0b06b055-b497-43f7-8797-99aea73dd071" containerID="fdea144376457bca5b26c275136b3f4a08f85e66533ef7a2efcc82bcb35824e7" exitCode=0 Sep 30 13:26:31 crc kubenswrapper[5002]: I0930 13:26:31.539465 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-qt59t" event={"ID":"0b06b055-b497-43f7-8797-99aea73dd071","Type":"ContainerDied","Data":"fdea144376457bca5b26c275136b3f4a08f85e66533ef7a2efcc82bcb35824e7"} Sep 30 13:26:31 crc kubenswrapper[5002]: I0930 13:26:31.539508 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-qt59t" event={"ID":"0b06b055-b497-43f7-8797-99aea73dd071","Type":"ContainerDied","Data":"fd2e454f5e93fd38fab4f903b859699ea2be833146307ee6d29ec69e8dcc2810"} Sep 30 13:26:31 crc kubenswrapper[5002]: I0930 13:26:31.539508 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-qt59t" Sep 30 13:26:31 crc kubenswrapper[5002]: I0930 13:26:31.539532 5002 scope.go:117] "RemoveContainer" containerID="fdea144376457bca5b26c275136b3f4a08f85e66533ef7a2efcc82bcb35824e7" Sep 30 13:26:31 crc kubenswrapper[5002]: I0930 13:26:31.564944 5002 scope.go:117] "RemoveContainer" containerID="10dd1e8dfc0cb7ecd476d332524171d649e1fa21dc6c457c6b9175cf3bb9687c" Sep 30 13:26:31 crc kubenswrapper[5002]: I0930 13:26:31.585212 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_control-plane-machine-set-operator-78cbb6b69f-2pmxc_f27fcf7d-17db-407a-b6ee-e34779332edf/control-plane-machine-set-operator/0.log" Sep 30 13:26:31 crc kubenswrapper[5002]: I0930 13:26:31.606870 5002 scope.go:117] "RemoveContainer" containerID="726dd397be1c8ba662b0a9d915399771628c76107994f29d375b025c8133e50b" Sep 30 13:26:31 crc kubenswrapper[5002]: I0930 13:26:31.636708 5002 scope.go:117] "RemoveContainer" containerID="fdea144376457bca5b26c275136b3f4a08f85e66533ef7a2efcc82bcb35824e7" Sep 30 13:26:31 crc kubenswrapper[5002]: E0930 13:26:31.637072 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fdea144376457bca5b26c275136b3f4a08f85e66533ef7a2efcc82bcb35824e7\": container with ID starting with fdea144376457bca5b26c275136b3f4a08f85e66533ef7a2efcc82bcb35824e7 not found: ID does not exist" containerID="fdea144376457bca5b26c275136b3f4a08f85e66533ef7a2efcc82bcb35824e7" Sep 30 13:26:31 crc kubenswrapper[5002]: I0930 13:26:31.637111 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fdea144376457bca5b26c275136b3f4a08f85e66533ef7a2efcc82bcb35824e7"} err="failed to get container status \"fdea144376457bca5b26c275136b3f4a08f85e66533ef7a2efcc82bcb35824e7\": rpc error: code = NotFound 
desc = could not find container \"fdea144376457bca5b26c275136b3f4a08f85e66533ef7a2efcc82bcb35824e7\": container with ID starting with fdea144376457bca5b26c275136b3f4a08f85e66533ef7a2efcc82bcb35824e7 not found: ID does not exist" Sep 30 13:26:31 crc kubenswrapper[5002]: I0930 13:26:31.637134 5002 scope.go:117] "RemoveContainer" containerID="10dd1e8dfc0cb7ecd476d332524171d649e1fa21dc6c457c6b9175cf3bb9687c" Sep 30 13:26:31 crc kubenswrapper[5002]: E0930 13:26:31.637512 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"10dd1e8dfc0cb7ecd476d332524171d649e1fa21dc6c457c6b9175cf3bb9687c\": container with ID starting with 10dd1e8dfc0cb7ecd476d332524171d649e1fa21dc6c457c6b9175cf3bb9687c not found: ID does not exist" containerID="10dd1e8dfc0cb7ecd476d332524171d649e1fa21dc6c457c6b9175cf3bb9687c" Sep 30 13:26:31 crc kubenswrapper[5002]: I0930 13:26:31.637579 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"10dd1e8dfc0cb7ecd476d332524171d649e1fa21dc6c457c6b9175cf3bb9687c"} err="failed to get container status \"10dd1e8dfc0cb7ecd476d332524171d649e1fa21dc6c457c6b9175cf3bb9687c\": rpc error: code = NotFound desc = could not find container \"10dd1e8dfc0cb7ecd476d332524171d649e1fa21dc6c457c6b9175cf3bb9687c\": container with ID starting with 10dd1e8dfc0cb7ecd476d332524171d649e1fa21dc6c457c6b9175cf3bb9687c not found: ID does not exist" Sep 30 13:26:31 crc kubenswrapper[5002]: I0930 13:26:31.637613 5002 scope.go:117] "RemoveContainer" containerID="726dd397be1c8ba662b0a9d915399771628c76107994f29d375b025c8133e50b" Sep 30 13:26:31 crc kubenswrapper[5002]: E0930 13:26:31.638578 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"726dd397be1c8ba662b0a9d915399771628c76107994f29d375b025c8133e50b\": container with ID starting with 726dd397be1c8ba662b0a9d915399771628c76107994f29d375b025c8133e50b not found: ID does not exist" containerID="726dd397be1c8ba662b0a9d915399771628c76107994f29d375b025c8133e50b" Sep 30 13:26:31 crc kubenswrapper[5002]: I0930 13:26:31.638609 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"726dd397be1c8ba662b0a9d915399771628c76107994f29d375b025c8133e50b"} err="failed to get container status \"726dd397be1c8ba662b0a9d915399771628c76107994f29d375b025c8133e50b\": rpc error: code = NotFound desc = could not find container \"726dd397be1c8ba662b0a9d915399771628c76107994f29d375b025c8133e50b\": container with ID starting with 726dd397be1c8ba662b0a9d915399771628c76107994f29d375b025c8133e50b not found: ID does not exist" Sep 30 13:26:31 crc kubenswrapper[5002]: I0930 13:26:31.731235 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-fp74c_ccd47538-6f91-4c6d-91b0-afccf0c83b20/kube-rbac-proxy/0.log" Sep 30 13:26:31 crc kubenswrapper[5002]: I0930 13:26:31.782250 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-fp74c_ccd47538-6f91-4c6d-91b0-afccf0c83b20/machine-api-operator/0.log" Sep 30 13:26:31 crc kubenswrapper[5002]: I0930 13:26:31.869143 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0b06b055-b497-43f7-8797-99aea73dd071-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "0b06b055-b497-43f7-8797-99aea73dd071" (UID: "0b06b055-b497-43f7-8797-99aea73dd071"). 
InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 13:26:31 crc kubenswrapper[5002]: I0930 13:26:31.965517 5002 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0b06b055-b497-43f7-8797-99aea73dd071-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 13:26:32 crc kubenswrapper[5002]: I0930 13:26:32.171412 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-qt59t"] Sep 30 13:26:32 crc kubenswrapper[5002]: I0930 13:26:32.182127 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-qt59t"] Sep 30 13:26:32 crc kubenswrapper[5002]: I0930 13:26:32.690194 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b06b055-b497-43f7-8797-99aea73dd071" path="/var/lib/kubelet/pods/0b06b055-b497-43f7-8797-99aea73dd071/volumes" Sep 30 13:26:44 crc kubenswrapper[5002]: I0930 13:26:44.151808 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-5b446d88c5-6z4b2_fa128ff5-c231-478a-8a20-a617f7187459/cert-manager-controller/0.log" Sep 30 13:26:44 crc kubenswrapper[5002]: I0930 13:26:44.346015 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-webhook-5655c58dd6-h79vx_22886b45-e205-4eed-8610-087e217a2f3e/cert-manager-webhook/0.log" Sep 30 13:26:44 crc kubenswrapper[5002]: I0930 13:26:44.361393 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-cainjector-7f985d654d-974dk_cf984127-3ede-48b0-84a6-aaa1c3c321c1/cert-manager-cainjector/0.log" Sep 30 13:26:55 crc kubenswrapper[5002]: I0930 13:26:55.370922 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-console-plugin-864bb6dfb5-p8l92_c350ea0c-aa9c-4ccb-9607-aeff49e295b1/nmstate-console-plugin/0.log" Sep 30 13:26:55 crc kubenswrapper[5002]: I0930 13:26:55.933061 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-handler-4s6bl_b3b353e1-b31e-45ba-b22e-6e78fd291203/nmstate-handler/0.log" Sep 30 13:26:56 crc kubenswrapper[5002]: I0930 13:26:56.033210 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-58fcddf996-w7bgk_54cfc79e-c203-4715-a7d4-0120f7577db6/kube-rbac-proxy/0.log" Sep 30 13:26:56 crc kubenswrapper[5002]: I0930 13:26:56.047054 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-58fcddf996-w7bgk_54cfc79e-c203-4715-a7d4-0120f7577db6/nmstate-metrics/0.log" Sep 30 13:26:56 crc kubenswrapper[5002]: I0930 13:26:56.188262 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-operator-5d6f6cfd66-45rf2_f5de2a34-58db-4513-a544-baac53a6ee7f/nmstate-operator/0.log" Sep 30 13:26:56 crc kubenswrapper[5002]: I0930 13:26:56.265817 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-webhook-6d689559c5-5q5mt_7a89ecb4-494a-40f2-8e0c-871b2c94f8a2/nmstate-webhook/0.log" Sep 30 13:27:02 crc kubenswrapper[5002]: I0930 13:27:02.098156 5002 patch_prober.go:28] interesting pod/machine-config-daemon-ncbb5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 13:27:02 crc kubenswrapper[5002]: I0930 13:27:02.098800 5002 prober.go:107] 
"Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" podUID="341a55c6-78d3-4fa2-8f47-b56fd41fa1c1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 13:27:03 crc kubenswrapper[5002]: I0930 13:27:03.178447 5002 scope.go:117] "RemoveContainer" containerID="01ed2fe0f9f4f58f0ba9b636cdf468b863c117e21bd51bdeade9cba762ec0f3e" Sep 30 13:27:09 crc kubenswrapper[5002]: I0930 13:27:09.371361 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-5d688f5ffc-4hv4v_9381f980-e2fe-4cf6-97ad-93757413f357/kube-rbac-proxy/0.log" Sep 30 13:27:09 crc kubenswrapper[5002]: I0930 13:27:09.537382 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-5d688f5ffc-4hv4v_9381f980-e2fe-4cf6-97ad-93757413f357/controller/0.log" Sep 30 13:27:09 crc kubenswrapper[5002]: I0930 13:27:09.586283 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-fxsmq_66fde16c-f197-4399-bef6-7ea1d7d41611/cp-frr-files/0.log" Sep 30 13:27:09 crc kubenswrapper[5002]: I0930 13:27:09.731348 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-fxsmq_66fde16c-f197-4399-bef6-7ea1d7d41611/cp-frr-files/0.log" Sep 30 13:27:09 crc kubenswrapper[5002]: I0930 13:27:09.784379 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-fxsmq_66fde16c-f197-4399-bef6-7ea1d7d41611/cp-metrics/0.log" Sep 30 13:27:09 crc kubenswrapper[5002]: I0930 13:27:09.792523 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-fxsmq_66fde16c-f197-4399-bef6-7ea1d7d41611/cp-reloader/0.log" Sep 30 13:27:09 crc kubenswrapper[5002]: I0930 13:27:09.803880 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-fxsmq_66fde16c-f197-4399-bef6-7ea1d7d41611/cp-reloader/0.log" Sep 30 13:27:09 crc kubenswrapper[5002]: I0930 13:27:09.947306 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-fxsmq_66fde16c-f197-4399-bef6-7ea1d7d41611/cp-reloader/0.log" Sep 30 13:27:09 crc kubenswrapper[5002]: I0930 13:27:09.954532 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-fxsmq_66fde16c-f197-4399-bef6-7ea1d7d41611/cp-frr-files/0.log" Sep 30 13:27:09 crc kubenswrapper[5002]: I0930 13:27:09.981994 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-fxsmq_66fde16c-f197-4399-bef6-7ea1d7d41611/cp-metrics/0.log" Sep 30 13:27:09 crc kubenswrapper[5002]: I0930 13:27:09.999575 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-fxsmq_66fde16c-f197-4399-bef6-7ea1d7d41611/cp-metrics/0.log" Sep 30 13:27:10 crc kubenswrapper[5002]: I0930 13:27:10.158574 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-fxsmq_66fde16c-f197-4399-bef6-7ea1d7d41611/cp-frr-files/0.log" Sep 30 13:27:10 crc kubenswrapper[5002]: I0930 13:27:10.174383 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-fxsmq_66fde16c-f197-4399-bef6-7ea1d7d41611/cp-reloader/0.log" Sep 30 13:27:10 crc kubenswrapper[5002]: I0930 13:27:10.180537 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-fxsmq_66fde16c-f197-4399-bef6-7ea1d7d41611/cp-metrics/0.log" Sep 30 13:27:10 crc kubenswrapper[5002]: I0930 13:27:10.192664 
5002 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-fxsmq_66fde16c-f197-4399-bef6-7ea1d7d41611/controller/0.log" Sep 30 13:27:10 crc kubenswrapper[5002]: I0930 13:27:10.328830 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-fxsmq_66fde16c-f197-4399-bef6-7ea1d7d41611/frr-metrics/0.log" Sep 30 13:27:10 crc kubenswrapper[5002]: I0930 13:27:10.384522 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-fxsmq_66fde16c-f197-4399-bef6-7ea1d7d41611/kube-rbac-proxy/0.log" Sep 30 13:27:10 crc kubenswrapper[5002]: I0930 13:27:10.386884 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-fxsmq_66fde16c-f197-4399-bef6-7ea1d7d41611/kube-rbac-proxy-frr/0.log" Sep 30 13:27:10 crc kubenswrapper[5002]: I0930 13:27:10.569074 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-fxsmq_66fde16c-f197-4399-bef6-7ea1d7d41611/reloader/0.log" Sep 30 13:27:10 crc kubenswrapper[5002]: I0930 13:27:10.638238 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-webhook-server-5478bdb765-j499w_94a678f1-7504-4246-9ce4-23886b1a3623/frr-k8s-webhook-server/0.log" Sep 30 13:27:10 crc kubenswrapper[5002]: I0930 13:27:10.844444 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-controller-manager-576d687654-bw9lz_27c28a83-93f3-40e5-9430-e95593fb9b70/manager/0.log" Sep 30 13:27:11 crc kubenswrapper[5002]: I0930 13:27:11.023178 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-webhook-server-6dd5844d8b-5jxxh_6e455bb5-7fde-4e6e-a287-e053991325b2/webhook-server/0.log" Sep 30 13:27:11 crc kubenswrapper[5002]: I0930 13:27:11.094532 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-tq2q9_bae6d903-934d-4f98-9924-805cc9b20d5c/kube-rbac-proxy/0.log" Sep 30 13:27:11 crc kubenswrapper[5002]: I0930 13:27:11.678673 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-tq2q9_bae6d903-934d-4f98-9924-805cc9b20d5c/speaker/0.log" Sep 30 13:27:11 crc kubenswrapper[5002]: I0930 13:27:11.866810 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-fxsmq_66fde16c-f197-4399-bef6-7ea1d7d41611/frr/0.log" Sep 30 13:27:23 crc kubenswrapper[5002]: I0930 13:27:23.634587 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcstdhg_8eafa11f-bdad-4035-82be-1fe0e27a0282/util/0.log" Sep 30 13:27:23 crc kubenswrapper[5002]: I0930 13:27:23.807334 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcstdhg_8eafa11f-bdad-4035-82be-1fe0e27a0282/util/0.log" Sep 30 13:27:23 crc kubenswrapper[5002]: I0930 13:27:23.835324 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcstdhg_8eafa11f-bdad-4035-82be-1fe0e27a0282/pull/0.log" Sep 30 13:27:23 crc kubenswrapper[5002]: I0930 13:27:23.841730 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcstdhg_8eafa11f-bdad-4035-82be-1fe0e27a0282/pull/0.log" Sep 30 13:27:24 crc kubenswrapper[5002]: I0930 13:27:24.017151 5002 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcstdhg_8eafa11f-bdad-4035-82be-1fe0e27a0282/util/0.log" Sep 30 13:27:24 crc kubenswrapper[5002]: I0930 13:27:24.040644 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcstdhg_8eafa11f-bdad-4035-82be-1fe0e27a0282/pull/0.log" Sep 30 13:27:24 crc kubenswrapper[5002]: I0930 13:27:24.044639 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcstdhg_8eafa11f-bdad-4035-82be-1fe0e27a0282/extract/0.log" Sep 30 13:27:24 crc kubenswrapper[5002]: I0930 13:27:24.178378 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-p4m6q_122cb276-c38c-4c29-80f9-b9e225b0a5a6/extract-utilities/0.log" Sep 30 13:27:24 crc kubenswrapper[5002]: I0930 13:27:24.328826 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-p4m6q_122cb276-c38c-4c29-80f9-b9e225b0a5a6/extract-content/0.log" Sep 30 13:27:24 crc kubenswrapper[5002]: I0930 13:27:24.329258 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-p4m6q_122cb276-c38c-4c29-80f9-b9e225b0a5a6/extract-utilities/0.log" Sep 30 13:27:24 crc kubenswrapper[5002]: I0930 13:27:24.333062 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-p4m6q_122cb276-c38c-4c29-80f9-b9e225b0a5a6/extract-content/0.log" Sep 30 13:27:24 crc kubenswrapper[5002]: I0930 13:27:24.516536 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-p4m6q_122cb276-c38c-4c29-80f9-b9e225b0a5a6/extract-content/0.log" Sep 30 13:27:24 crc kubenswrapper[5002]: I0930 13:27:24.518109 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-p4m6q_122cb276-c38c-4c29-80f9-b9e225b0a5a6/extract-utilities/0.log" Sep 30 13:27:25 crc kubenswrapper[5002]: I0930 13:27:25.048730 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-rqfbc_d184b411-2932-4ace-a20a-b81cdfec713a/extract-utilities/0.log" Sep 30 13:27:25 crc kubenswrapper[5002]: I0930 13:27:25.110510 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-p4m6q_122cb276-c38c-4c29-80f9-b9e225b0a5a6/registry-server/0.log" Sep 30 13:27:25 crc kubenswrapper[5002]: I0930 13:27:25.251028 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-rqfbc_d184b411-2932-4ace-a20a-b81cdfec713a/extract-utilities/0.log" Sep 30 13:27:25 crc kubenswrapper[5002]: I0930 13:27:25.252131 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-rqfbc_d184b411-2932-4ace-a20a-b81cdfec713a/extract-content/0.log" Sep 30 13:27:25 crc kubenswrapper[5002]: I0930 13:27:25.259648 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-rqfbc_d184b411-2932-4ace-a20a-b81cdfec713a/extract-content/0.log" Sep 30 13:27:25 crc kubenswrapper[5002]: I0930 13:27:25.413067 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-rqfbc_d184b411-2932-4ace-a20a-b81cdfec713a/extract-utilities/0.log" Sep 30 13:27:25 crc kubenswrapper[5002]: 
I0930 13:27:25.550723 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-rqfbc_d184b411-2932-4ace-a20a-b81cdfec713a/extract-content/0.log" Sep 30 13:27:25 crc kubenswrapper[5002]: I0930 13:27:25.604512 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96bqfq5_9da270d3-58ec-44e6-acaa-3cb86fbc2047/util/0.log" Sep 30 13:27:25 crc kubenswrapper[5002]: I0930 13:27:25.762136 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-rqfbc_d184b411-2932-4ace-a20a-b81cdfec713a/registry-server/0.log" Sep 30 13:27:25 crc kubenswrapper[5002]: I0930 13:27:25.860951 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96bqfq5_9da270d3-58ec-44e6-acaa-3cb86fbc2047/pull/0.log" Sep 30 13:27:25 crc kubenswrapper[5002]: I0930 13:27:25.861062 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96bqfq5_9da270d3-58ec-44e6-acaa-3cb86fbc2047/util/0.log" Sep 30 13:27:25 crc kubenswrapper[5002]: I0930 13:27:25.913118 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96bqfq5_9da270d3-58ec-44e6-acaa-3cb86fbc2047/pull/0.log" Sep 30 13:27:26 crc kubenswrapper[5002]: I0930 13:27:26.067609 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96bqfq5_9da270d3-58ec-44e6-acaa-3cb86fbc2047/util/0.log" Sep 30 13:27:26 crc kubenswrapper[5002]: I0930 13:27:26.071775 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96bqfq5_9da270d3-58ec-44e6-acaa-3cb86fbc2047/extract/0.log" Sep 30 13:27:26 crc kubenswrapper[5002]: I0930 13:27:26.090874 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96bqfq5_9da270d3-58ec-44e6-acaa-3cb86fbc2047/pull/0.log" Sep 30 13:27:26 crc kubenswrapper[5002]: I0930 13:27:26.274185 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-kzj7j_68797de5-40f1-448f-9fbb-fa3eb4adc842/extract-utilities/0.log" Sep 30 13:27:26 crc kubenswrapper[5002]: I0930 13:27:26.284761 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-hrqrc_75fd37d6-6c7a-4c39-8ed0-c9ba6caa2b7f/marketplace-operator/0.log" Sep 30 13:27:27 crc kubenswrapper[5002]: I0930 13:27:27.064850 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-kzj7j_68797de5-40f1-448f-9fbb-fa3eb4adc842/extract-utilities/0.log" Sep 30 13:27:27 crc kubenswrapper[5002]: I0930 13:27:27.078995 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-kzj7j_68797de5-40f1-448f-9fbb-fa3eb4adc842/extract-content/0.log" Sep 30 13:27:27 crc kubenswrapper[5002]: I0930 13:27:27.081929 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-kzj7j_68797de5-40f1-448f-9fbb-fa3eb4adc842/extract-content/0.log" Sep 30 13:27:27 crc kubenswrapper[5002]: I0930 13:27:27.278438 5002 log.go:25] "Finished parsing 
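Note: every record in this file carries a journal prefix ("Sep 30 13:27:25 crc kubenswrapper[5002]:") followed by a klog header: severity letter (I/W/E/F), MMDD date, wall-clock time, PID, and source file:line. A small sketch of how such lines can be split for analysis (a hypothetical helper for working with this dump, not part of any shipped tool):

    package main

    import (
        "fmt"
        "regexp"
    )

    // klogLine captures: journal timestamp, host, journal PID, klog severity,
    // MMDD, klog time, klog PID, source file:line, and the message body.
    var klogLine = regexp.MustCompile(
        `^(\w+ \d+ [\d:]+) (\S+) kubenswrapper\[(\d+)\]: ([IWEF])(\d{4}) ([\d:.]+)\s+(\d+) ([\w.]+:\d+)\] (.*)$`)

    func main() {
        line := `Sep 30 13:27:25 crc kubenswrapper[5002]: I0930 13:27:25.550723 5002 log.go:25] "Finished parsing log file"`
        if m := klogLine.FindStringSubmatch(line); m != nil {
            fmt.Printf("severity=%s source=%s msg=%s\n", m[4], m[8], m[9])
        }
    }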
log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-kzj7j_68797de5-40f1-448f-9fbb-fa3eb4adc842/extract-utilities/0.log" Sep 30 13:27:27 crc kubenswrapper[5002]: I0930 13:27:27.364345 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-kzj7j_68797de5-40f1-448f-9fbb-fa3eb4adc842/extract-content/0.log" Sep 30 13:27:27 crc kubenswrapper[5002]: I0930 13:27:27.373432 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-twns5_dc331b2c-354f-4cb2-9aab-2f8328781341/extract-utilities/0.log" Sep 30 13:27:27 crc kubenswrapper[5002]: I0930 13:27:27.435502 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-kzj7j_68797de5-40f1-448f-9fbb-fa3eb4adc842/registry-server/0.log" Sep 30 13:27:27 crc kubenswrapper[5002]: I0930 13:27:27.597587 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-twns5_dc331b2c-354f-4cb2-9aab-2f8328781341/extract-utilities/0.log" Sep 30 13:27:27 crc kubenswrapper[5002]: I0930 13:27:27.610185 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-twns5_dc331b2c-354f-4cb2-9aab-2f8328781341/extract-content/0.log" Sep 30 13:27:27 crc kubenswrapper[5002]: I0930 13:27:27.629100 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-twns5_dc331b2c-354f-4cb2-9aab-2f8328781341/extract-content/0.log" Sep 30 13:27:27 crc kubenswrapper[5002]: I0930 13:27:27.803523 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-twns5_dc331b2c-354f-4cb2-9aab-2f8328781341/extract-utilities/0.log" Sep 30 13:27:27 crc kubenswrapper[5002]: I0930 13:27:27.984854 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-twns5_dc331b2c-354f-4cb2-9aab-2f8328781341/extract-content/0.log" Sep 30 13:27:28 crc kubenswrapper[5002]: I0930 13:27:28.468514 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-twns5_dc331b2c-354f-4cb2-9aab-2f8328781341/registry-server/0.log" Sep 30 13:27:32 crc kubenswrapper[5002]: I0930 13:27:32.098622 5002 patch_prober.go:28] interesting pod/machine-config-daemon-ncbb5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 13:27:32 crc kubenswrapper[5002]: I0930 13:27:32.100166 5002 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" podUID="341a55c6-78d3-4fa2-8f47-b56fd41fa1c1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 13:27:46 crc kubenswrapper[5002]: I0930 13:27:46.696369 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-vb4tm"] Sep 30 13:27:46 crc kubenswrapper[5002]: E0930 13:27:46.697319 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0b06b055-b497-43f7-8797-99aea73dd071" containerName="extract-content" Sep 30 13:27:46 crc kubenswrapper[5002]: I0930 13:27:46.697335 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="0b06b055-b497-43f7-8797-99aea73dd071" containerName="extract-content" Sep 
30 13:27:46 crc kubenswrapper[5002]: E0930 13:27:46.697376 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0b06b055-b497-43f7-8797-99aea73dd071" containerName="extract-utilities" Sep 30 13:27:46 crc kubenswrapper[5002]: I0930 13:27:46.697383 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="0b06b055-b497-43f7-8797-99aea73dd071" containerName="extract-utilities" Sep 30 13:27:46 crc kubenswrapper[5002]: E0930 13:27:46.697401 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0b06b055-b497-43f7-8797-99aea73dd071" containerName="registry-server" Sep 30 13:27:46 crc kubenswrapper[5002]: I0930 13:27:46.697409 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="0b06b055-b497-43f7-8797-99aea73dd071" containerName="registry-server" Sep 30 13:27:46 crc kubenswrapper[5002]: I0930 13:27:46.697626 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="0b06b055-b497-43f7-8797-99aea73dd071" containerName="registry-server" Sep 30 13:27:46 crc kubenswrapper[5002]: I0930 13:27:46.699218 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-vb4tm" Sep 30 13:27:46 crc kubenswrapper[5002]: I0930 13:27:46.711648 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-vb4tm"] Sep 30 13:27:46 crc kubenswrapper[5002]: I0930 13:27:46.772312 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9f712a13-38d0-49e4-8aef-939c3451e6f3-utilities\") pod \"certified-operators-vb4tm\" (UID: \"9f712a13-38d0-49e4-8aef-939c3451e6f3\") " pod="openshift-marketplace/certified-operators-vb4tm" Sep 30 13:27:46 crc kubenswrapper[5002]: I0930 13:27:46.772401 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rppz7\" (UniqueName: \"kubernetes.io/projected/9f712a13-38d0-49e4-8aef-939c3451e6f3-kube-api-access-rppz7\") pod \"certified-operators-vb4tm\" (UID: \"9f712a13-38d0-49e4-8aef-939c3451e6f3\") " pod="openshift-marketplace/certified-operators-vb4tm" Sep 30 13:27:46 crc kubenswrapper[5002]: I0930 13:27:46.772535 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9f712a13-38d0-49e4-8aef-939c3451e6f3-catalog-content\") pod \"certified-operators-vb4tm\" (UID: \"9f712a13-38d0-49e4-8aef-939c3451e6f3\") " pod="openshift-marketplace/certified-operators-vb4tm" Sep 30 13:27:46 crc kubenswrapper[5002]: I0930 13:27:46.874165 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9f712a13-38d0-49e4-8aef-939c3451e6f3-catalog-content\") pod \"certified-operators-vb4tm\" (UID: \"9f712a13-38d0-49e4-8aef-939c3451e6f3\") " pod="openshift-marketplace/certified-operators-vb4tm" Sep 30 13:27:46 crc kubenswrapper[5002]: I0930 13:27:46.874293 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9f712a13-38d0-49e4-8aef-939c3451e6f3-utilities\") pod \"certified-operators-vb4tm\" (UID: \"9f712a13-38d0-49e4-8aef-939c3451e6f3\") " pod="openshift-marketplace/certified-operators-vb4tm" Sep 30 13:27:46 crc kubenswrapper[5002]: I0930 13:27:46.874373 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"kube-api-access-rppz7\" (UniqueName: \"kubernetes.io/projected/9f712a13-38d0-49e4-8aef-939c3451e6f3-kube-api-access-rppz7\") pod \"certified-operators-vb4tm\" (UID: \"9f712a13-38d0-49e4-8aef-939c3451e6f3\") " pod="openshift-marketplace/certified-operators-vb4tm" Sep 30 13:27:46 crc kubenswrapper[5002]: I0930 13:27:46.875100 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9f712a13-38d0-49e4-8aef-939c3451e6f3-utilities\") pod \"certified-operators-vb4tm\" (UID: \"9f712a13-38d0-49e4-8aef-939c3451e6f3\") " pod="openshift-marketplace/certified-operators-vb4tm" Sep 30 13:27:46 crc kubenswrapper[5002]: I0930 13:27:46.876908 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9f712a13-38d0-49e4-8aef-939c3451e6f3-catalog-content\") pod \"certified-operators-vb4tm\" (UID: \"9f712a13-38d0-49e4-8aef-939c3451e6f3\") " pod="openshift-marketplace/certified-operators-vb4tm" Sep 30 13:27:46 crc kubenswrapper[5002]: I0930 13:27:46.900562 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rppz7\" (UniqueName: \"kubernetes.io/projected/9f712a13-38d0-49e4-8aef-939c3451e6f3-kube-api-access-rppz7\") pod \"certified-operators-vb4tm\" (UID: \"9f712a13-38d0-49e4-8aef-939c3451e6f3\") " pod="openshift-marketplace/certified-operators-vb4tm" Sep 30 13:27:47 crc kubenswrapper[5002]: I0930 13:27:47.025942 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-vb4tm" Sep 30 13:27:47 crc kubenswrapper[5002]: I0930 13:27:47.690268 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-vb4tm"] Sep 30 13:27:48 crc kubenswrapper[5002]: I0930 13:27:48.236135 5002 generic.go:334] "Generic (PLEG): container finished" podID="9f712a13-38d0-49e4-8aef-939c3451e6f3" containerID="d05128f0b8a7db7d2522d2a292c78d79e3dcca643f00375439fa19ee67f3d89d" exitCode=0 Sep 30 13:27:48 crc kubenswrapper[5002]: I0930 13:27:48.236232 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vb4tm" event={"ID":"9f712a13-38d0-49e4-8aef-939c3451e6f3","Type":"ContainerDied","Data":"d05128f0b8a7db7d2522d2a292c78d79e3dcca643f00375439fa19ee67f3d89d"} Sep 30 13:27:48 crc kubenswrapper[5002]: I0930 13:27:48.236539 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vb4tm" event={"ID":"9f712a13-38d0-49e4-8aef-939c3451e6f3","Type":"ContainerStarted","Data":"662e552be74ea16cd7f274e05c475cd7e90e49a93f2c77fc81f3867b4e9a9a23"} Sep 30 13:27:50 crc kubenswrapper[5002]: I0930 13:27:50.274516 5002 generic.go:334] "Generic (PLEG): container finished" podID="9f712a13-38d0-49e4-8aef-939c3451e6f3" containerID="9b9bda792b6c373bce62db926cec8b13d03e6a5a316204d4315c870d796781a4" exitCode=0 Sep 30 13:27:50 crc kubenswrapper[5002]: I0930 13:27:50.274814 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vb4tm" event={"ID":"9f712a13-38d0-49e4-8aef-939c3451e6f3","Type":"ContainerDied","Data":"9b9bda792b6c373bce62db926cec8b13d03e6a5a316204d4315c870d796781a4"} Sep 30 13:27:51 crc kubenswrapper[5002]: I0930 13:27:51.284498 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vb4tm" 
event={"ID":"9f712a13-38d0-49e4-8aef-939c3451e6f3","Type":"ContainerStarted","Data":"982b6036509a50f0b0ed1becc83b7fc638ffdc795ff5f3637098f54618a4736e"} Sep 30 13:27:52 crc kubenswrapper[5002]: I0930 13:27:52.310203 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-vb4tm" podStartSLOduration=3.768355008 podStartE2EDuration="6.310183058s" podCreationTimestamp="2025-09-30 13:27:46 +0000 UTC" firstStartedPulling="2025-09-30 13:27:48.238699847 +0000 UTC m=+4042.488381993" lastFinishedPulling="2025-09-30 13:27:50.780527897 +0000 UTC m=+4045.030210043" observedRunningTime="2025-09-30 13:27:52.307965931 +0000 UTC m=+4046.557648087" watchObservedRunningTime="2025-09-30 13:27:52.310183058 +0000 UTC m=+4046.559865224" Sep 30 13:27:54 crc kubenswrapper[5002]: E0930 13:27:54.887057 5002 upgradeaware.go:427] Error proxying data from client to backend: readfrom tcp 38.102.83.194:56084->38.102.83.194:40383: write tcp 38.102.83.194:56084->38.102.83.194:40383: write: broken pipe Sep 30 13:27:57 crc kubenswrapper[5002]: I0930 13:27:57.027080 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-vb4tm" Sep 30 13:27:57 crc kubenswrapper[5002]: I0930 13:27:57.027909 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-vb4tm" Sep 30 13:27:57 crc kubenswrapper[5002]: I0930 13:27:57.077338 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-vb4tm" Sep 30 13:27:57 crc kubenswrapper[5002]: I0930 13:27:57.386520 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-vb4tm" Sep 30 13:27:57 crc kubenswrapper[5002]: I0930 13:27:57.431964 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-vb4tm"] Sep 30 13:27:59 crc kubenswrapper[5002]: I0930 13:27:59.365461 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-vb4tm" podUID="9f712a13-38d0-49e4-8aef-939c3451e6f3" containerName="registry-server" containerID="cri-o://982b6036509a50f0b0ed1becc83b7fc638ffdc795ff5f3637098f54618a4736e" gracePeriod=2 Sep 30 13:27:59 crc kubenswrapper[5002]: I0930 13:27:59.834397 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-vb4tm" Sep 30 13:27:59 crc kubenswrapper[5002]: I0930 13:27:59.908188 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9f712a13-38d0-49e4-8aef-939c3451e6f3-utilities\") pod \"9f712a13-38d0-49e4-8aef-939c3451e6f3\" (UID: \"9f712a13-38d0-49e4-8aef-939c3451e6f3\") " Sep 30 13:27:59 crc kubenswrapper[5002]: I0930 13:27:59.908295 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9f712a13-38d0-49e4-8aef-939c3451e6f3-catalog-content\") pod \"9f712a13-38d0-49e4-8aef-939c3451e6f3\" (UID: \"9f712a13-38d0-49e4-8aef-939c3451e6f3\") " Sep 30 13:27:59 crc kubenswrapper[5002]: I0930 13:27:59.908358 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rppz7\" (UniqueName: \"kubernetes.io/projected/9f712a13-38d0-49e4-8aef-939c3451e6f3-kube-api-access-rppz7\") pod \"9f712a13-38d0-49e4-8aef-939c3451e6f3\" (UID: \"9f712a13-38d0-49e4-8aef-939c3451e6f3\") " Sep 30 13:27:59 crc kubenswrapper[5002]: I0930 13:27:59.909356 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9f712a13-38d0-49e4-8aef-939c3451e6f3-utilities" (OuterVolumeSpecName: "utilities") pod "9f712a13-38d0-49e4-8aef-939c3451e6f3" (UID: "9f712a13-38d0-49e4-8aef-939c3451e6f3"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 13:27:59 crc kubenswrapper[5002]: I0930 13:27:59.919105 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9f712a13-38d0-49e4-8aef-939c3451e6f3-kube-api-access-rppz7" (OuterVolumeSpecName: "kube-api-access-rppz7") pod "9f712a13-38d0-49e4-8aef-939c3451e6f3" (UID: "9f712a13-38d0-49e4-8aef-939c3451e6f3"). InnerVolumeSpecName "kube-api-access-rppz7". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 13:27:59 crc kubenswrapper[5002]: I0930 13:27:59.953201 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9f712a13-38d0-49e4-8aef-939c3451e6f3-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "9f712a13-38d0-49e4-8aef-939c3451e6f3" (UID: "9f712a13-38d0-49e4-8aef-939c3451e6f3"). InnerVolumeSpecName "catalog-content". 
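Note: the startup-latency record above is internally consistent: podStartE2EDuration = watchObservedRunningTime - podCreationTimestamp = 13:27:52.310183058 - 13:27:46 = 6.310183058s, while podStartSLOduration excludes the image-pull window (lastFinishedPulling - firstStartedPulling = 13:27:50.780527897 - 13:27:48.238699847 = 2.541828050s), giving 6.310183058 - 2.541828050 = 3.768355008s.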
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 13:28:00 crc kubenswrapper[5002]: I0930 13:28:00.010716 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rppz7\" (UniqueName: \"kubernetes.io/projected/9f712a13-38d0-49e4-8aef-939c3451e6f3-kube-api-access-rppz7\") on node \"crc\" DevicePath \"\"" Sep 30 13:28:00 crc kubenswrapper[5002]: I0930 13:28:00.010753 5002 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9f712a13-38d0-49e4-8aef-939c3451e6f3-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 13:28:00 crc kubenswrapper[5002]: I0930 13:28:00.010764 5002 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9f712a13-38d0-49e4-8aef-939c3451e6f3-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 13:28:00 crc kubenswrapper[5002]: I0930 13:28:00.375922 5002 generic.go:334] "Generic (PLEG): container finished" podID="9f712a13-38d0-49e4-8aef-939c3451e6f3" containerID="982b6036509a50f0b0ed1becc83b7fc638ffdc795ff5f3637098f54618a4736e" exitCode=0 Sep 30 13:28:00 crc kubenswrapper[5002]: I0930 13:28:00.376019 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vb4tm" event={"ID":"9f712a13-38d0-49e4-8aef-939c3451e6f3","Type":"ContainerDied","Data":"982b6036509a50f0b0ed1becc83b7fc638ffdc795ff5f3637098f54618a4736e"} Sep 30 13:28:00 crc kubenswrapper[5002]: I0930 13:28:00.376087 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-vb4tm" Sep 30 13:28:00 crc kubenswrapper[5002]: I0930 13:28:00.376105 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vb4tm" event={"ID":"9f712a13-38d0-49e4-8aef-939c3451e6f3","Type":"ContainerDied","Data":"662e552be74ea16cd7f274e05c475cd7e90e49a93f2c77fc81f3867b4e9a9a23"} Sep 30 13:28:00 crc kubenswrapper[5002]: I0930 13:28:00.376152 5002 scope.go:117] "RemoveContainer" containerID="982b6036509a50f0b0ed1becc83b7fc638ffdc795ff5f3637098f54618a4736e" Sep 30 13:28:00 crc kubenswrapper[5002]: I0930 13:28:00.406512 5002 scope.go:117] "RemoveContainer" containerID="9b9bda792b6c373bce62db926cec8b13d03e6a5a316204d4315c870d796781a4" Sep 30 13:28:00 crc kubenswrapper[5002]: I0930 13:28:00.426906 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-vb4tm"] Sep 30 13:28:00 crc kubenswrapper[5002]: I0930 13:28:00.437772 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-vb4tm"] Sep 30 13:28:00 crc kubenswrapper[5002]: I0930 13:28:00.450447 5002 scope.go:117] "RemoveContainer" containerID="d05128f0b8a7db7d2522d2a292c78d79e3dcca643f00375439fa19ee67f3d89d" Sep 30 13:28:00 crc kubenswrapper[5002]: I0930 13:28:00.490517 5002 scope.go:117] "RemoveContainer" containerID="982b6036509a50f0b0ed1becc83b7fc638ffdc795ff5f3637098f54618a4736e" Sep 30 13:28:00 crc kubenswrapper[5002]: E0930 13:28:00.490921 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"982b6036509a50f0b0ed1becc83b7fc638ffdc795ff5f3637098f54618a4736e\": container with ID starting with 982b6036509a50f0b0ed1becc83b7fc638ffdc795ff5f3637098f54618a4736e not found: ID does not exist" containerID="982b6036509a50f0b0ed1becc83b7fc638ffdc795ff5f3637098f54618a4736e" Sep 30 13:28:00 crc kubenswrapper[5002]: I0930 13:28:00.490955 
5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"982b6036509a50f0b0ed1becc83b7fc638ffdc795ff5f3637098f54618a4736e"} err="failed to get container status \"982b6036509a50f0b0ed1becc83b7fc638ffdc795ff5f3637098f54618a4736e\": rpc error: code = NotFound desc = could not find container \"982b6036509a50f0b0ed1becc83b7fc638ffdc795ff5f3637098f54618a4736e\": container with ID starting with 982b6036509a50f0b0ed1becc83b7fc638ffdc795ff5f3637098f54618a4736e not found: ID does not exist" Sep 30 13:28:00 crc kubenswrapper[5002]: I0930 13:28:00.490974 5002 scope.go:117] "RemoveContainer" containerID="9b9bda792b6c373bce62db926cec8b13d03e6a5a316204d4315c870d796781a4" Sep 30 13:28:00 crc kubenswrapper[5002]: E0930 13:28:00.491349 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9b9bda792b6c373bce62db926cec8b13d03e6a5a316204d4315c870d796781a4\": container with ID starting with 9b9bda792b6c373bce62db926cec8b13d03e6a5a316204d4315c870d796781a4 not found: ID does not exist" containerID="9b9bda792b6c373bce62db926cec8b13d03e6a5a316204d4315c870d796781a4" Sep 30 13:28:00 crc kubenswrapper[5002]: I0930 13:28:00.491368 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9b9bda792b6c373bce62db926cec8b13d03e6a5a316204d4315c870d796781a4"} err="failed to get container status \"9b9bda792b6c373bce62db926cec8b13d03e6a5a316204d4315c870d796781a4\": rpc error: code = NotFound desc = could not find container \"9b9bda792b6c373bce62db926cec8b13d03e6a5a316204d4315c870d796781a4\": container with ID starting with 9b9bda792b6c373bce62db926cec8b13d03e6a5a316204d4315c870d796781a4 not found: ID does not exist" Sep 30 13:28:00 crc kubenswrapper[5002]: I0930 13:28:00.491381 5002 scope.go:117] "RemoveContainer" containerID="d05128f0b8a7db7d2522d2a292c78d79e3dcca643f00375439fa19ee67f3d89d" Sep 30 13:28:00 crc kubenswrapper[5002]: E0930 13:28:00.491811 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d05128f0b8a7db7d2522d2a292c78d79e3dcca643f00375439fa19ee67f3d89d\": container with ID starting with d05128f0b8a7db7d2522d2a292c78d79e3dcca643f00375439fa19ee67f3d89d not found: ID does not exist" containerID="d05128f0b8a7db7d2522d2a292c78d79e3dcca643f00375439fa19ee67f3d89d" Sep 30 13:28:00 crc kubenswrapper[5002]: I0930 13:28:00.491836 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d05128f0b8a7db7d2522d2a292c78d79e3dcca643f00375439fa19ee67f3d89d"} err="failed to get container status \"d05128f0b8a7db7d2522d2a292c78d79e3dcca643f00375439fa19ee67f3d89d\": rpc error: code = NotFound desc = could not find container \"d05128f0b8a7db7d2522d2a292c78d79e3dcca643f00375439fa19ee67f3d89d\": container with ID starting with d05128f0b8a7db7d2522d2a292c78d79e3dcca643f00375439fa19ee67f3d89d not found: ID does not exist" Sep 30 13:28:00 crc kubenswrapper[5002]: I0930 13:28:00.689111 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9f712a13-38d0-49e4-8aef-939c3451e6f3" path="/var/lib/kubelet/pods/9f712a13-38d0-49e4-8aef-939c3451e6f3/volumes" Sep 30 13:28:02 crc kubenswrapper[5002]: I0930 13:28:02.099358 5002 patch_prober.go:28] interesting pod/machine-config-daemon-ncbb5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 
Sep 30 13:28:02 crc kubenswrapper[5002]: I0930 13:28:02.099358 5002 patch_prober.go:28] interesting pod/machine-config-daemon-ncbb5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Sep 30 13:28:02 crc kubenswrapper[5002]: I0930 13:28:02.099778 5002 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" podUID="341a55c6-78d3-4fa2-8f47-b56fd41fa1c1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Sep 30 13:28:02 crc kubenswrapper[5002]: I0930 13:28:02.099840 5002 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5"
Sep 30 13:28:02 crc kubenswrapper[5002]: I0930 13:28:02.100825 5002 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"804478c07c3268d7d7f250189ddbad5a2e4a5479ddd419784cb5f98b480546c7"} pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Sep 30 13:28:02 crc kubenswrapper[5002]: I0930 13:28:02.100902 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" podUID="341a55c6-78d3-4fa2-8f47-b56fd41fa1c1" containerName="machine-config-daemon" containerID="cri-o://804478c07c3268d7d7f250189ddbad5a2e4a5479ddd419784cb5f98b480546c7" gracePeriod=600
Sep 30 13:28:02 crc kubenswrapper[5002]: I0930 13:28:02.400351 5002 generic.go:334] "Generic (PLEG): container finished" podID="341a55c6-78d3-4fa2-8f47-b56fd41fa1c1" containerID="804478c07c3268d7d7f250189ddbad5a2e4a5479ddd419784cb5f98b480546c7" exitCode=0
Sep 30 13:28:02 crc kubenswrapper[5002]: I0930 13:28:02.400527 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" event={"ID":"341a55c6-78d3-4fa2-8f47-b56fd41fa1c1","Type":"ContainerDied","Data":"804478c07c3268d7d7f250189ddbad5a2e4a5479ddd419784cb5f98b480546c7"}
Sep 30 13:28:02 crc kubenswrapper[5002]: I0930 13:28:02.400761 5002 scope.go:117] "RemoveContainer" containerID="99dfdbc96d28883eef6b198c958bc370158e50fab55ddd68077640f2dc60329b"
Sep 30 13:28:03 crc kubenswrapper[5002]: I0930 13:28:03.413653 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" event={"ID":"341a55c6-78d3-4fa2-8f47-b56fd41fa1c1","Type":"ContainerStarted","Data":"c113bb47bb0e7d2f8915da6931df460004b39ba7d03a9db3906d78e1741608da"}
Sep 30 13:29:25 crc kubenswrapper[5002]: I0930 13:29:25.194811 5002 generic.go:334] "Generic (PLEG): container finished" podID="36c403d4-7bcb-4916-b08d-fcf0f1f8470a" containerID="3bca0e61e11f809246a2dac1fb80f1bae7c10615930592fb16a1e54d4430522a" exitCode=0
Sep 30 13:29:25 crc kubenswrapper[5002]: I0930 13:29:25.194997 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-smlkm/must-gather-hdp54" event={"ID":"36c403d4-7bcb-4916-b08d-fcf0f1f8470a","Type":"ContainerDied","Data":"3bca0e61e11f809246a2dac1fb80f1bae7c10615930592fb16a1e54d4430522a"}
Sep 30 13:29:25 crc kubenswrapper[5002]: I0930 13:29:25.195958 5002 scope.go:117] "RemoveContainer" containerID="3bca0e61e11f809246a2dac1fb80f1bae7c10615930592fb16a1e54d4430522a"
Sep 30 13:29:25 crc kubenswrapper[5002]: I0930 13:29:25.259442 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-smlkm_must-gather-hdp54_36c403d4-7bcb-4916-b08d-fcf0f1f8470a/gather/0.log"
Sep 30 13:29:30 crc kubenswrapper[5002]: E0930 13:29:30.986902 5002 upgradeaware.go:427] Error proxying data from client to backend: readfrom tcp 38.102.83.194:37986->38.102.83.194:40383: write tcp 38.102.83.194:37986->38.102.83.194:40383: write: broken pipe
Sep 30 13:29:36 crc kubenswrapper[5002]: I0930 13:29:36.656657 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-smlkm/must-gather-hdp54"]
Sep 30 13:29:36 crc kubenswrapper[5002]: I0930 13:29:36.657418 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-must-gather-smlkm/must-gather-hdp54" podUID="36c403d4-7bcb-4916-b08d-fcf0f1f8470a" containerName="copy" containerID="cri-o://e29a59ce9fd9b7e11d33799ae3dd4e0581d9f83156f2e07f1e353a4a6208682e" gracePeriod=2
Sep 30 13:29:36 crc kubenswrapper[5002]: I0930 13:29:36.670501 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-smlkm/must-gather-hdp54"]
Sep 30 13:29:37 crc kubenswrapper[5002]: I0930 13:29:37.038727 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-smlkm_must-gather-hdp54_36c403d4-7bcb-4916-b08d-fcf0f1f8470a/copy/0.log"
Sep 30 13:29:37 crc kubenswrapper[5002]: I0930 13:29:37.039187 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-smlkm/must-gather-hdp54"
Sep 30 13:29:37 crc kubenswrapper[5002]: I0930 13:29:37.196091 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kcmq9\" (UniqueName: \"kubernetes.io/projected/36c403d4-7bcb-4916-b08d-fcf0f1f8470a-kube-api-access-kcmq9\") pod \"36c403d4-7bcb-4916-b08d-fcf0f1f8470a\" (UID: \"36c403d4-7bcb-4916-b08d-fcf0f1f8470a\") "
Sep 30 13:29:37 crc kubenswrapper[5002]: I0930 13:29:37.196421 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/36c403d4-7bcb-4916-b08d-fcf0f1f8470a-must-gather-output\") pod \"36c403d4-7bcb-4916-b08d-fcf0f1f8470a\" (UID: \"36c403d4-7bcb-4916-b08d-fcf0f1f8470a\") "
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 13:29:37 crc kubenswrapper[5002]: I0930 13:29:37.300886 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kcmq9\" (UniqueName: \"kubernetes.io/projected/36c403d4-7bcb-4916-b08d-fcf0f1f8470a-kube-api-access-kcmq9\") on node \"crc\" DevicePath \"\"" Sep 30 13:29:37 crc kubenswrapper[5002]: I0930 13:29:37.320537 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-smlkm_must-gather-hdp54_36c403d4-7bcb-4916-b08d-fcf0f1f8470a/copy/0.log" Sep 30 13:29:37 crc kubenswrapper[5002]: I0930 13:29:37.320841 5002 generic.go:334] "Generic (PLEG): container finished" podID="36c403d4-7bcb-4916-b08d-fcf0f1f8470a" containerID="e29a59ce9fd9b7e11d33799ae3dd4e0581d9f83156f2e07f1e353a4a6208682e" exitCode=143 Sep 30 13:29:37 crc kubenswrapper[5002]: I0930 13:29:37.320892 5002 scope.go:117] "RemoveContainer" containerID="e29a59ce9fd9b7e11d33799ae3dd4e0581d9f83156f2e07f1e353a4a6208682e" Sep 30 13:29:37 crc kubenswrapper[5002]: I0930 13:29:37.321050 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-smlkm/must-gather-hdp54" Sep 30 13:29:37 crc kubenswrapper[5002]: I0930 13:29:37.345987 5002 scope.go:117] "RemoveContainer" containerID="3bca0e61e11f809246a2dac1fb80f1bae7c10615930592fb16a1e54d4430522a" Sep 30 13:29:37 crc kubenswrapper[5002]: I0930 13:29:37.369465 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/36c403d4-7bcb-4916-b08d-fcf0f1f8470a-must-gather-output" (OuterVolumeSpecName: "must-gather-output") pod "36c403d4-7bcb-4916-b08d-fcf0f1f8470a" (UID: "36c403d4-7bcb-4916-b08d-fcf0f1f8470a"). InnerVolumeSpecName "must-gather-output". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 13:29:37 crc kubenswrapper[5002]: I0930 13:29:37.403172 5002 reconciler_common.go:293] "Volume detached for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/36c403d4-7bcb-4916-b08d-fcf0f1f8470a-must-gather-output\") on node \"crc\" DevicePath \"\"" Sep 30 13:29:37 crc kubenswrapper[5002]: I0930 13:29:37.413647 5002 scope.go:117] "RemoveContainer" containerID="e29a59ce9fd9b7e11d33799ae3dd4e0581d9f83156f2e07f1e353a4a6208682e" Sep 30 13:29:37 crc kubenswrapper[5002]: E0930 13:29:37.414174 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e29a59ce9fd9b7e11d33799ae3dd4e0581d9f83156f2e07f1e353a4a6208682e\": container with ID starting with e29a59ce9fd9b7e11d33799ae3dd4e0581d9f83156f2e07f1e353a4a6208682e not found: ID does not exist" containerID="e29a59ce9fd9b7e11d33799ae3dd4e0581d9f83156f2e07f1e353a4a6208682e" Sep 30 13:29:37 crc kubenswrapper[5002]: I0930 13:29:37.414214 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e29a59ce9fd9b7e11d33799ae3dd4e0581d9f83156f2e07f1e353a4a6208682e"} err="failed to get container status \"e29a59ce9fd9b7e11d33799ae3dd4e0581d9f83156f2e07f1e353a4a6208682e\": rpc error: code = NotFound desc = could not find container \"e29a59ce9fd9b7e11d33799ae3dd4e0581d9f83156f2e07f1e353a4a6208682e\": container with ID starting with e29a59ce9fd9b7e11d33799ae3dd4e0581d9f83156f2e07f1e353a4a6208682e not found: ID does not exist" Sep 30 13:29:37 crc kubenswrapper[5002]: I0930 13:29:37.414242 5002 scope.go:117] "RemoveContainer" containerID="3bca0e61e11f809246a2dac1fb80f1bae7c10615930592fb16a1e54d4430522a" Sep 30 13:29:37 crc 
kubenswrapper[5002]: E0930 13:29:37.414601 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3bca0e61e11f809246a2dac1fb80f1bae7c10615930592fb16a1e54d4430522a\": container with ID starting with 3bca0e61e11f809246a2dac1fb80f1bae7c10615930592fb16a1e54d4430522a not found: ID does not exist" containerID="3bca0e61e11f809246a2dac1fb80f1bae7c10615930592fb16a1e54d4430522a" Sep 30 13:29:37 crc kubenswrapper[5002]: I0930 13:29:37.414657 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3bca0e61e11f809246a2dac1fb80f1bae7c10615930592fb16a1e54d4430522a"} err="failed to get container status \"3bca0e61e11f809246a2dac1fb80f1bae7c10615930592fb16a1e54d4430522a\": rpc error: code = NotFound desc = could not find container \"3bca0e61e11f809246a2dac1fb80f1bae7c10615930592fb16a1e54d4430522a\": container with ID starting with 3bca0e61e11f809246a2dac1fb80f1bae7c10615930592fb16a1e54d4430522a not found: ID does not exist" Sep 30 13:29:38 crc kubenswrapper[5002]: I0930 13:29:38.687645 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="36c403d4-7bcb-4916-b08d-fcf0f1f8470a" path="/var/lib/kubelet/pods/36c403d4-7bcb-4916-b08d-fcf0f1f8470a/volumes" Sep 30 13:30:00 crc kubenswrapper[5002]: I0930 13:30:00.155357 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29320650-5lqst"] Sep 30 13:30:00 crc kubenswrapper[5002]: E0930 13:30:00.156133 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="36c403d4-7bcb-4916-b08d-fcf0f1f8470a" containerName="copy" Sep 30 13:30:00 crc kubenswrapper[5002]: I0930 13:30:00.156145 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="36c403d4-7bcb-4916-b08d-fcf0f1f8470a" containerName="copy" Sep 30 13:30:00 crc kubenswrapper[5002]: E0930 13:30:00.156167 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9f712a13-38d0-49e4-8aef-939c3451e6f3" containerName="extract-content" Sep 30 13:30:00 crc kubenswrapper[5002]: I0930 13:30:00.156173 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="9f712a13-38d0-49e4-8aef-939c3451e6f3" containerName="extract-content" Sep 30 13:30:00 crc kubenswrapper[5002]: E0930 13:30:00.156180 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="36c403d4-7bcb-4916-b08d-fcf0f1f8470a" containerName="gather" Sep 30 13:30:00 crc kubenswrapper[5002]: I0930 13:30:00.156187 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="36c403d4-7bcb-4916-b08d-fcf0f1f8470a" containerName="gather" Sep 30 13:30:00 crc kubenswrapper[5002]: E0930 13:30:00.156198 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9f712a13-38d0-49e4-8aef-939c3451e6f3" containerName="extract-utilities" Sep 30 13:30:00 crc kubenswrapper[5002]: I0930 13:30:00.156204 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="9f712a13-38d0-49e4-8aef-939c3451e6f3" containerName="extract-utilities" Sep 30 13:30:00 crc kubenswrapper[5002]: E0930 13:30:00.156224 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9f712a13-38d0-49e4-8aef-939c3451e6f3" containerName="registry-server" Sep 30 13:30:00 crc kubenswrapper[5002]: I0930 13:30:00.156231 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="9f712a13-38d0-49e4-8aef-939c3451e6f3" containerName="registry-server" Sep 30 13:30:00 crc kubenswrapper[5002]: I0930 13:30:00.156393 5002 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="36c403d4-7bcb-4916-b08d-fcf0f1f8470a" containerName="copy" Sep 30 13:30:00 crc kubenswrapper[5002]: I0930 13:30:00.156410 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="9f712a13-38d0-49e4-8aef-939c3451e6f3" containerName="registry-server" Sep 30 13:30:00 crc kubenswrapper[5002]: I0930 13:30:00.156419 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="36c403d4-7bcb-4916-b08d-fcf0f1f8470a" containerName="gather" Sep 30 13:30:00 crc kubenswrapper[5002]: I0930 13:30:00.157021 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29320650-5lqst" Sep 30 13:30:00 crc kubenswrapper[5002]: I0930 13:30:00.160059 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Sep 30 13:30:00 crc kubenswrapper[5002]: I0930 13:30:00.160490 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Sep 30 13:30:00 crc kubenswrapper[5002]: I0930 13:30:00.166438 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29320650-5lqst"] Sep 30 13:30:00 crc kubenswrapper[5002]: I0930 13:30:00.255380 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/d7ee316d-fc90-4746-a25b-96d6d3914be4-config-volume\") pod \"collect-profiles-29320650-5lqst\" (UID: \"d7ee316d-fc90-4746-a25b-96d6d3914be4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29320650-5lqst" Sep 30 13:30:00 crc kubenswrapper[5002]: I0930 13:30:00.255563 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cdwvb\" (UniqueName: \"kubernetes.io/projected/d7ee316d-fc90-4746-a25b-96d6d3914be4-kube-api-access-cdwvb\") pod \"collect-profiles-29320650-5lqst\" (UID: \"d7ee316d-fc90-4746-a25b-96d6d3914be4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29320650-5lqst" Sep 30 13:30:00 crc kubenswrapper[5002]: I0930 13:30:00.255641 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/d7ee316d-fc90-4746-a25b-96d6d3914be4-secret-volume\") pod \"collect-profiles-29320650-5lqst\" (UID: \"d7ee316d-fc90-4746-a25b-96d6d3914be4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29320650-5lqst" Sep 30 13:30:00 crc kubenswrapper[5002]: I0930 13:30:00.357220 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/d7ee316d-fc90-4746-a25b-96d6d3914be4-config-volume\") pod \"collect-profiles-29320650-5lqst\" (UID: \"d7ee316d-fc90-4746-a25b-96d6d3914be4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29320650-5lqst" Sep 30 13:30:00 crc kubenswrapper[5002]: I0930 13:30:00.357670 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cdwvb\" (UniqueName: \"kubernetes.io/projected/d7ee316d-fc90-4746-a25b-96d6d3914be4-kube-api-access-cdwvb\") pod \"collect-profiles-29320650-5lqst\" (UID: \"d7ee316d-fc90-4746-a25b-96d6d3914be4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29320650-5lqst" Sep 30 13:30:00 crc kubenswrapper[5002]: I0930 13:30:00.357737 5002 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/d7ee316d-fc90-4746-a25b-96d6d3914be4-secret-volume\") pod \"collect-profiles-29320650-5lqst\" (UID: \"d7ee316d-fc90-4746-a25b-96d6d3914be4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29320650-5lqst" Sep 30 13:30:00 crc kubenswrapper[5002]: I0930 13:30:00.358199 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/d7ee316d-fc90-4746-a25b-96d6d3914be4-config-volume\") pod \"collect-profiles-29320650-5lqst\" (UID: \"d7ee316d-fc90-4746-a25b-96d6d3914be4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29320650-5lqst" Sep 30 13:30:00 crc kubenswrapper[5002]: I0930 13:30:00.379301 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/d7ee316d-fc90-4746-a25b-96d6d3914be4-secret-volume\") pod \"collect-profiles-29320650-5lqst\" (UID: \"d7ee316d-fc90-4746-a25b-96d6d3914be4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29320650-5lqst" Sep 30 13:30:00 crc kubenswrapper[5002]: I0930 13:30:00.381711 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cdwvb\" (UniqueName: \"kubernetes.io/projected/d7ee316d-fc90-4746-a25b-96d6d3914be4-kube-api-access-cdwvb\") pod \"collect-profiles-29320650-5lqst\" (UID: \"d7ee316d-fc90-4746-a25b-96d6d3914be4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29320650-5lqst" Sep 30 13:30:00 crc kubenswrapper[5002]: I0930 13:30:00.498354 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29320650-5lqst" Sep 30 13:30:00 crc kubenswrapper[5002]: I0930 13:30:00.947975 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29320650-5lqst"] Sep 30 13:30:01 crc kubenswrapper[5002]: I0930 13:30:01.547634 5002 generic.go:334] "Generic (PLEG): container finished" podID="d7ee316d-fc90-4746-a25b-96d6d3914be4" containerID="5fac7afac20b6e06004b78235cef16cdea95bc2dda3902c8f726485e33399407" exitCode=0 Sep 30 13:30:01 crc kubenswrapper[5002]: I0930 13:30:01.547678 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29320650-5lqst" event={"ID":"d7ee316d-fc90-4746-a25b-96d6d3914be4","Type":"ContainerDied","Data":"5fac7afac20b6e06004b78235cef16cdea95bc2dda3902c8f726485e33399407"} Sep 30 13:30:01 crc kubenswrapper[5002]: I0930 13:30:01.547931 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29320650-5lqst" event={"ID":"d7ee316d-fc90-4746-a25b-96d6d3914be4","Type":"ContainerStarted","Data":"30bd354eb4eb75c810cec962dc111273fe376de4e93484f9ec54c990ec4f27dd"} Sep 30 13:30:01 crc kubenswrapper[5002]: E0930 13:30:01.622279 5002 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd7ee316d_fc90_4746_a25b_96d6d3914be4.slice/crio-conmon-5fac7afac20b6e06004b78235cef16cdea95bc2dda3902c8f726485e33399407.scope\": RecentStats: unable to find data in memory cache]" Sep 30 13:30:02 crc kubenswrapper[5002]: I0930 13:30:02.098308 5002 patch_prober.go:28] interesting pod/machine-config-daemon-ncbb5 container/machine-config-daemon namespace/openshift-machine-config-operator: 
Sep 30 13:30:02 crc kubenswrapper[5002]: I0930 13:30:02.098666 5002 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-ncbb5" podUID="341a55c6-78d3-4fa2-8f47-b56fd41fa1c1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Sep 30 13:30:02 crc kubenswrapper[5002]: I0930 13:30:02.981762 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29320650-5lqst"
Sep 30 13:30:03 crc kubenswrapper[5002]: I0930 13:30:03.115581 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/d7ee316d-fc90-4746-a25b-96d6d3914be4-config-volume\") pod \"d7ee316d-fc90-4746-a25b-96d6d3914be4\" (UID: \"d7ee316d-fc90-4746-a25b-96d6d3914be4\") "
Sep 30 13:30:03 crc kubenswrapper[5002]: I0930 13:30:03.115871 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/d7ee316d-fc90-4746-a25b-96d6d3914be4-secret-volume\") pod \"d7ee316d-fc90-4746-a25b-96d6d3914be4\" (UID: \"d7ee316d-fc90-4746-a25b-96d6d3914be4\") "
Sep 30 13:30:03 crc kubenswrapper[5002]: I0930 13:30:03.116017 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cdwvb\" (UniqueName: \"kubernetes.io/projected/d7ee316d-fc90-4746-a25b-96d6d3914be4-kube-api-access-cdwvb\") pod \"d7ee316d-fc90-4746-a25b-96d6d3914be4\" (UID: \"d7ee316d-fc90-4746-a25b-96d6d3914be4\") "
Sep 30 13:30:03 crc kubenswrapper[5002]: I0930 13:30:03.117000 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d7ee316d-fc90-4746-a25b-96d6d3914be4-config-volume" (OuterVolumeSpecName: "config-volume") pod "d7ee316d-fc90-4746-a25b-96d6d3914be4" (UID: "d7ee316d-fc90-4746-a25b-96d6d3914be4"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 30 13:30:03 crc kubenswrapper[5002]: I0930 13:30:03.161362 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d7ee316d-fc90-4746-a25b-96d6d3914be4-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "d7ee316d-fc90-4746-a25b-96d6d3914be4" (UID: "d7ee316d-fc90-4746-a25b-96d6d3914be4"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 13:30:03 crc kubenswrapper[5002]: I0930 13:30:03.161566 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d7ee316d-fc90-4746-a25b-96d6d3914be4-kube-api-access-cdwvb" (OuterVolumeSpecName: "kube-api-access-cdwvb") pod "d7ee316d-fc90-4746-a25b-96d6d3914be4" (UID: "d7ee316d-fc90-4746-a25b-96d6d3914be4"). InnerVolumeSpecName "kube-api-access-cdwvb". PluginName "kubernetes.io/projected", VolumeGidValue ""
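The teardown above runs a fixed per-volume sequence: the reconciler starts UnmountVolume, the operation generator's TearDown executes, and only afterwards is the volume reported detached (the entries that follow). A toy Go rendering of that ordering; the volume type and tearDown helper are invented for illustration:

    package main

    import "fmt"

    type volume struct{ name, plugin string }

    // tearDown stands in for the volume plugin's unmount; a real
    // implementation can fail and be retried on the next reconcile pass.
    func tearDown(v volume) error {
        fmt.Printf("UnmountVolume.TearDown succeeded for volume %q (plugin %s)\n", v.name, v.plugin)
        return nil
    }

    func main() {
        vols := []volume{
            {"config-volume", "kubernetes.io/configmap"},
            {"secret-volume", "kubernetes.io/secret"},
            {"kube-api-access-cdwvb", "kubernetes.io/projected"},
        }
        for _, v := range vols {
            if err := tearDown(v); err != nil {
                fmt.Println("unmount failed, will retry:", err)
                continue
            }
            // Only a successfully unmounted volume is marked detached.
            fmt.Printf("Volume detached for volume %q on node %q\n", v.name, "crc")
        }
    }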
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 13:30:03 crc kubenswrapper[5002]: I0930 13:30:03.218671 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cdwvb\" (UniqueName: \"kubernetes.io/projected/d7ee316d-fc90-4746-a25b-96d6d3914be4-kube-api-access-cdwvb\") on node \"crc\" DevicePath \"\"" Sep 30 13:30:03 crc kubenswrapper[5002]: I0930 13:30:03.218722 5002 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/d7ee316d-fc90-4746-a25b-96d6d3914be4-config-volume\") on node \"crc\" DevicePath \"\"" Sep 30 13:30:03 crc kubenswrapper[5002]: I0930 13:30:03.218741 5002 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/d7ee316d-fc90-4746-a25b-96d6d3914be4-secret-volume\") on node \"crc\" DevicePath \"\"" Sep 30 13:30:03 crc kubenswrapper[5002]: I0930 13:30:03.597362 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29320650-5lqst" event={"ID":"d7ee316d-fc90-4746-a25b-96d6d3914be4","Type":"ContainerDied","Data":"30bd354eb4eb75c810cec962dc111273fe376de4e93484f9ec54c990ec4f27dd"} Sep 30 13:30:03 crc kubenswrapper[5002]: I0930 13:30:03.597409 5002 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="30bd354eb4eb75c810cec962dc111273fe376de4e93484f9ec54c990ec4f27dd" Sep 30 13:30:03 crc kubenswrapper[5002]: I0930 13:30:03.597462 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29320650-5lqst" Sep 30 13:30:04 crc kubenswrapper[5002]: I0930 13:30:04.065530 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29320605-zltsw"] Sep 30 13:30:04 crc kubenswrapper[5002]: I0930 13:30:04.075485 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29320605-zltsw"] Sep 30 13:30:04 crc kubenswrapper[5002]: I0930 13:30:04.699885 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d250b251-cfb4-4df3-90cc-1e8cc5ae1ab8" path="/var/lib/kubelet/pods/d250b251-cfb4-4df3-90cc-1e8cc5ae1ab8/volumes" var/home/core/zuul-output/logs/crc-cloud-workdir-crc-all-logs.tar.gz0000644000175000000000000000005515066755577024474 0ustar coreroot‹íÁ  ÷Om7 €7šÞ'(var/home/core/zuul-output/logs/crc-cloud/0000755000175000000000000000000015066755577017411 5ustar corerootvar/home/core/zuul-output/artifacts/0000755000175000017500000000000015066745023016515 5ustar corecorevar/home/core/zuul-output/docs/0000755000175000017500000000000015066745023015465 5ustar corecore